//===- DeadStoreElimination.cpp - Fast Dead Store Elimination -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements a trivial dead store elimination that only considers
// basic-block local redundant stores.
//
// FIXME: This should eventually be extended to be a post-dominator tree
// traversal.  Doing so would be pretty trivial.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar/DeadStoreElimination.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/GlobalsModRef.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Transforms/Utils/Local.h"
#include <map>
using namespace llvm;

#define DEBUG_TYPE "dse"

STATISTIC(NumRedundantStores, "Number of redundant stores deleted");
STATISTIC(NumFastStores, "Number of stores deleted");
STATISTIC(NumFastOther, "Number of other instrs removed");
STATISTIC(NumCompletePartials, "Number of stores dead by later partials");

static cl::opt<bool>
EnablePartialOverwriteTracking("enable-dse-partial-overwrite-tracking",
  cl::init(true), cl::Hidden,
  cl::desc("Enable partial-overwrite tracking in DSE"));
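
// Illustrative example (hypothetical IR): with this tracking enabled, later
// stores that each overwrite only part of an earlier store can together kill
// it.  Given an earlier 8-byte store covering bytes [0, 8):
//
//   store i64 0, i64* %p          ; earlier store
//   store i32 1, i32* %p.lo       ; later store, bytes [0, 4)
//   store i32 2, i32* %p.hi       ; later store, bytes [4, 8)
//
// (%p.lo and %p.hi are hypothetical pointers to the two halves of %p.)  Once
// the recorded intervals merge to cover [0, 8), the i64 store is dead and
// NumCompletePartials is bumped.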


//===----------------------------------------------------------------------===//
// Helper functions
//===----------------------------------------------------------------------===//

/// Delete this instruction.  Before we do, go through and zero out all the
/// operands of this instruction.  If any of them become dead, delete them and
/// the computation tree that feeds them.
/// If ValueSet is non-null, remove any deleted instructions from it as well.
static void
deleteDeadInstruction(Instruction *I, MemoryDependenceResults &MD,
                      const TargetLibraryInfo &TLI,
                      SmallSetVector<Value *, 16> *ValueSet = nullptr) {
  SmallVector<Instruction*, 32> NowDeadInsts;

  NowDeadInsts.push_back(I);
  --NumFastOther;

  // Before we touch this instruction, remove it from memdep!
  do {
    Instruction *DeadInst = NowDeadInsts.pop_back_val();
    ++NumFastOther;

    // This instruction is dead, zap it, in stages.  Start by removing it from
    // MemDep, which needs to know the operands and needs it to be in the
    // function.
    MD.removeInstruction(DeadInst);

    for (unsigned op = 0, e = DeadInst->getNumOperands(); op != e; ++op) {
      Value *Op = DeadInst->getOperand(op);
      DeadInst->setOperand(op, nullptr);

      // If this operand just became dead, add it to the NowDeadInsts list.
      if (!Op->use_empty()) continue;

      if (Instruction *OpI = dyn_cast<Instruction>(Op))
        if (isInstructionTriviallyDead(OpI, &TLI))
          NowDeadInsts.push_back(OpI);
    }

    DeadInst->eraseFromParent();

    if (ValueSet) ValueSet->remove(DeadInst);
  } while (!NowDeadInsts.empty());
}
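
// Illustrative example (hypothetical IR) of the cascade above: deleting
//
//   %gep = getelementptr i32, i32* %base, i64 1
//   store i32 %v, i32* %gep       ; the dead store being deleted
//
// also deletes %gep (and, transitively, anything feeding it) when the store
// was its only user and it is trivially dead.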

/// Does this instruction write some memory?  This only returns true for things
/// that we can analyze with other helpers below.
static bool hasMemoryWrite(Instruction *I, const TargetLibraryInfo &TLI) {
  if (isa<StoreInst>(I))
    return true;
  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default:
      return false;
    case Intrinsic::memset:
    case Intrinsic::memmove:
    case Intrinsic::memcpy:
    case Intrinsic::init_trampoline:
    case Intrinsic::lifetime_end:
      return true;
    }
  }
  if (auto CS = CallSite(I)) {
    if (Function *F = CS.getCalledFunction()) {
      StringRef FnName = F->getName();
      if (TLI.has(LibFunc::strcpy) && FnName == TLI.getName(LibFunc::strcpy))
        return true;
      if (TLI.has(LibFunc::strncpy) && FnName == TLI.getName(LibFunc::strncpy))
        return true;
      if (TLI.has(LibFunc::strcat) && FnName == TLI.getName(LibFunc::strcat))
        return true;
      if (TLI.has(LibFunc::strncat) && FnName == TLI.getName(LibFunc::strncat))
        return true;
    }
  }
  return false;
}

/// Return a Location stored to by the specified instruction. If isRemovable
/// returns true, this function and getLocForRead completely describe the memory
/// operations for this instruction.
static MemoryLocation getLocForWrite(Instruction *Inst, AliasAnalysis &AA) {
  if (StoreInst *SI = dyn_cast<StoreInst>(Inst))
    return MemoryLocation::get(SI);

  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(Inst)) {
    // memcpy/memmove/memset.
    MemoryLocation Loc = MemoryLocation::getForDest(MI);
    return Loc;
  }

  IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst);
  if (!II)
    return MemoryLocation();

  switch (II->getIntrinsicID()) {
  default:
    return MemoryLocation(); // Unhandled intrinsic.
  case Intrinsic::init_trampoline:
    // FIXME: We don't know the size of the trampoline, so we can't really
    // handle it here.
    return MemoryLocation(II->getArgOperand(0));
  case Intrinsic::lifetime_end: {
    uint64_t Len = cast<ConstantInt>(II->getArgOperand(0))->getZExtValue();
    return MemoryLocation(II->getArgOperand(1), Len);
  }
  }
}

/// Return the location read by the specified "hasMemoryWrite" instruction if
/// any.
static MemoryLocation getLocForRead(Instruction *Inst,
                                    const TargetLibraryInfo &TLI) {
  assert(hasMemoryWrite(Inst, TLI) && "Unknown instruction case");

  // The only instructions that both read and write are the mem transfer
  // instructions (memcpy/memmove).
  if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(Inst))
    return MemoryLocation::getForSource(MTI);
  return MemoryLocation();
}
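
// Illustrative example: for "memcpy(dst, src, 16)", getLocForWrite above
// yields the location (dst, 16) and getLocForRead yields (src, 16); for a
// plain store, getLocForRead yields an empty location since a store reads no
// memory.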

/// If the value of this instruction and the memory it writes to is unused, may
/// we delete this instruction?
static bool isRemovable(Instruction *I) {
  // Don't remove volatile/atomic stores.
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->isUnordered();

  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default: llvm_unreachable("doesn't pass 'hasMemoryWrite' predicate");
    case Intrinsic::lifetime_end:
      // Never remove dead lifetime_end's, e.g. because it is followed by a
      // free.
      return false;
    case Intrinsic::init_trampoline:
      // Always safe to remove init_trampoline.
      return true;

    case Intrinsic::memset:
    case Intrinsic::memmove:
    case Intrinsic::memcpy:
      // Don't remove volatile memory intrinsics.
      return !cast<MemIntrinsic>(II)->isVolatile();
    }
  }

  if (auto CS = CallSite(I))
    return CS.getInstruction()->use_empty();

  return false;
}


/// Returns true if the end of this instruction can be safely shortened in
/// length.
static bool isShortenableAtTheEnd(Instruction *I) {
  // Don't shorten stores for now.
  if (isa<StoreInst>(I))
    return false;

  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default: return false;
    case Intrinsic::memset:
    case Intrinsic::memcpy:
      // Do shorten memory intrinsics.
      // FIXME: Add memmove if it's also safe to transform.
      return true;
    }
  }

  // Don't shorten library calls for now.

  return false;
}

/// Returns true if the beginning of this instruction can be safely shortened
/// in length.
static bool isShortenableAtTheBeginning(Instruction *I) {
  // FIXME: Handle only memset for now. Supporting memcpy/memmove should be
  // easily done by offsetting the source address.
  IntrinsicInst *II = dyn_cast<IntrinsicInst>(I);
  return II && II->getIntrinsicID() == Intrinsic::memset;
}

/// Return the pointer that is being written to.
static Value *getStoredPointerOperand(Instruction *I) {
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperand();
  if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I))
    return MI->getDest();

  if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    default: llvm_unreachable("Unexpected intrinsic!");
    case Intrinsic::init_trampoline:
      return II->getArgOperand(0);
    }
  }

  CallSite CS(I);
  // All the supported functions so far happen to have dest as their first
  // argument.
  return CS.getArgument(0);
}

static uint64_t getPointerSize(const Value *V, const DataLayout &DL,
                               const TargetLibraryInfo &TLI) {
  uint64_t Size;
  if (getObjectSize(V, Size, DL, &TLI))
    return Size;
  return MemoryLocation::UnknownSize;
}

namespace {
enum OverwriteResult {
  OverwriteBegin,
  OverwriteComplete,
  OverwriteEnd,
  OverwriteUnknown
};
}

typedef DenseMap<Instruction *,
                 std::map<int64_t, int64_t>> InstOverlapIntervalsTy;

/// Return 'OverwriteComplete' if a store to the 'Later' location completely
/// overwrites a store to the 'Earlier' location, 'OverwriteEnd' if the end of
/// the 'Earlier' location is completely overwritten by 'Later',
/// 'OverwriteBegin' if the beginning of the 'Earlier' location is overwritten
/// by 'Later', or 'OverwriteUnknown' if nothing can be determined.
static OverwriteResult isOverwrite(const MemoryLocation &Later,
                                   const MemoryLocation &Earlier,
                                   const DataLayout &DL,
                                   const TargetLibraryInfo &TLI,
                                   int64_t &EarlierOff, int64_t &LaterOff,
                                   Instruction *DepWrite,
                                   InstOverlapIntervalsTy &IOL) {
  // If we don't know the sizes of either access, then we can't do a comparison.
  if (Later.Size == MemoryLocation::UnknownSize ||
      Earlier.Size == MemoryLocation::UnknownSize)
    return OverwriteUnknown;

  const Value *P1 = Earlier.Ptr->stripPointerCasts();
  const Value *P2 = Later.Ptr->stripPointerCasts();

  // If the start pointers are the same, we just have to compare sizes to see if
  // the later store was larger than the earlier store.
  if (P1 == P2) {
    // Make sure that the Later size is >= the Earlier size.
    if (Later.Size >= Earlier.Size)
      return OverwriteComplete;
  }

  // Check to see if the later store is to the entire object (either a global,
  // an alloca, or a byval/inalloca argument).  If so, then it clearly
  // overwrites any other store to the same object.
  const Value *UO1 = GetUnderlyingObject(P1, DL),
              *UO2 = GetUnderlyingObject(P2, DL);

  // If we can't resolve the same pointers to the same object, then we can't
  // analyze them at all.
  if (UO1 != UO2)
    return OverwriteUnknown;

  // If the "Later" store is to a recognizable object, get its size.
  uint64_t ObjectSize = getPointerSize(UO2, DL, TLI);
  if (ObjectSize != MemoryLocation::UnknownSize)
    if (ObjectSize == Later.Size && ObjectSize >= Earlier.Size)
      return OverwriteComplete;

  // Okay, we have stores to two completely different pointers.  Try to
  // decompose the pointer into a "base + constant_offset" form.  If the base
  // pointers are equal, then we can reason about the two stores.
  EarlierOff = 0;
  LaterOff = 0;
  const Value *BP1 = GetPointerBaseWithConstantOffset(P1, EarlierOff, DL);
  const Value *BP2 = GetPointerBaseWithConstantOffset(P2, LaterOff, DL);

  // If the base pointers still differ, we have two completely different stores.
  if (BP1 != BP2)
    return OverwriteUnknown;

  // The later store completely overlaps the earlier store if:
  //
  // 1. Both start at the same offset and the later one's size is greater than
  //    or equal to the earlier one's, or
  //
  //      |--earlier--|
  //      |--   later   --|
  //
  // 2. The earlier store has an offset greater than the later offset, but which
  //    still lies completely within the later store.
  //
  //        |--earlier--|
  //    |-----  later  ------|
  //
  // We have to be careful here as *Off is signed while *.Size is unsigned.
  if (EarlierOff >= LaterOff &&
      Later.Size >= Earlier.Size &&
      uint64_t(EarlierOff - LaterOff) + Earlier.Size <= Later.Size)
    return OverwriteComplete;

  // We may now overlap, although the overlap is not complete. There might also
  // be other incomplete overlaps, and together, they might cover the complete
  // earlier write.
  // Note: The correctness of this logic depends on the fact that this function
  // is not even called with DepWrite when there are any intervening reads.
  if (EnablePartialOverwriteTracking &&
      LaterOff < int64_t(EarlierOff + Earlier.Size) &&
      int64_t(LaterOff + Later.Size) >= EarlierOff) {

    // Insert our part of the overlap into the map.
    auto &IM = IOL[DepWrite];
    DEBUG(dbgs() << "DSE: Partial overwrite: Earlier [" << EarlierOff << ", " <<
                    int64_t(EarlierOff + Earlier.Size) << ") Later [" <<
                    LaterOff << ", " << int64_t(LaterOff + Later.Size) << ")\n");

    // Make sure that we only insert non-overlapping intervals and combine
    // adjacent intervals. The intervals are stored in the map with the ending
    // offset as the key (in the half-open sense) and the starting offset as
    // the value.
    int64_t LaterIntStart = LaterOff, LaterIntEnd = LaterOff + Later.Size;

    // Find any intervals ending at, or after, LaterIntStart which start
    // before LaterIntEnd.
    auto ILI = IM.lower_bound(LaterIntStart);
    if (ILI != IM.end() && ILI->second <= LaterIntEnd) {
      // This existing interval is overlapped with the current store somewhere
      // in [LaterIntStart, LaterIntEnd]. Merge them by erasing the existing
      // intervals and adjusting our start and end.
      LaterIntStart = std::min(LaterIntStart, ILI->second);
      LaterIntEnd = std::max(LaterIntEnd, ILI->first);
      ILI = IM.erase(ILI);

      // Continue erasing and adjusting our end in case other previous
      // intervals are also overlapped with the current store.
      //
      // |--- earlier 1 ---|  |--- earlier 2 ---|
      //     |------- later---------|
      //
      while (ILI != IM.end() && ILI->second <= LaterIntEnd) {
        assert(ILI->second > LaterIntStart && "Unexpected interval");
        LaterIntEnd = std::max(LaterIntEnd, ILI->first);
        ILI = IM.erase(ILI);
      }
    }

    IM[LaterIntEnd] = LaterIntStart;

    ILI = IM.begin();
    if (ILI->second <= EarlierOff &&
        ILI->first >= int64_t(EarlierOff + Earlier.Size)) {
      DEBUG(dbgs() << "DSE: Full overwrite from partials: Earlier [" <<
                      EarlierOff << ", " <<
                      int64_t(EarlierOff + Earlier.Size) <<
                      ") Composite Later [" <<
                      ILI->second << ", " << ILI->first << ")\n");
      ++NumCompletePartials;
      return OverwriteComplete;
    }
  }
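
  // Illustrative walk-through of the interval map: with an earlier write
  // covering [0, 8), a later store of [0, 4) records IM[4] = 0; a subsequent
  // store of [4, 8) merges with it into IM[8] = 0.  That composite interval
  // spans the whole earlier write, so the code above returns
  // OverwriteComplete.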
423
Jun Bum Limd29a24e2016-04-22 19:51:29 +0000424 // Another interesting case is if the later store overwrites the end of the
425 // earlier store.
Pete Cooper856977c2011-11-09 23:07:35 +0000426 //
427 // |--earlier--|
428 // |-- later --|
429 //
430 // In this case we may want to trim the size of earlier to avoid generating
431 // writes to addresses which will definitely be overwritten later
432 if (LaterOff > EarlierOff &&
433 LaterOff < int64_t(EarlierOff + Earlier.Size) &&
Pete Coopere03fe832011-12-03 00:04:30 +0000434 int64_t(LaterOff + Later.Size) >= int64_t(EarlierOff + Earlier.Size))
Pete Cooper856977c2011-11-09 23:07:35 +0000435 return OverwriteEnd;
Bill Wendling19f33b92011-03-26 08:02:59 +0000436
Jun Bum Limd29a24e2016-04-22 19:51:29 +0000437 // Finally, we also need to check if the later store overwrites the beginning
438 // of the earlier store.
439 //
440 // |--earlier--|
441 // |-- later --|
442 //
443 // In this case we may want to move the destination address and trim the size
444 // of earlier to avoid generating writes to addresses which will definitely
445 // be overwritten later.
446 if (LaterOff <= EarlierOff && int64_t(LaterOff + Later.Size) > EarlierOff) {
447 assert (int64_t(LaterOff + Later.Size) < int64_t(EarlierOff + Earlier.Size)
448 && "Expect to be handled as OverwriteComplete" );
449 return OverwriteBegin;
450 }
Bill Wendling19f33b92011-03-26 08:02:59 +0000451 // Otherwise, they don't completely overlap.
Pete Cooper856977c2011-11-09 23:07:35 +0000452 return OverwriteUnknown;
Nick Lewycky90271472009-11-10 06:46:40 +0000453}
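
// Illustrative summary of isOverwrite, using half-open [start, end) byte
// ranges measured from a common base pointer:
//
//   Earlier [0, 8),  Later [0, 8)   -> OverwriteComplete
//   Earlier [0, 8),  Later [4, 12)  -> OverwriteEnd    (tail can be trimmed)
//   Earlier [4, 12), Later [0, 8)   -> OverwriteBegin  (head can be trimmed)
//   Earlier [0, 8),  Later [10, 12) -> OverwriteUnknown (no overlap)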

/// If 'Inst' might be a self read (i.e. a noop copy of a
/// memory region into an identical pointer) then it doesn't actually make its
/// input dead in the traditional sense.  Consider this case:
///
///   memcpy(A <- B)
///   memcpy(A <- A)
///
/// In this case, the second store to A does not make the first store to A dead.
/// The usual situation isn't an explicit A<-A store like this (which can be
/// trivially removed) but a case where two pointers may alias.
///
/// This function detects when it is unsafe to remove a dependent instruction
/// because the DSE inducing instruction may be a self-read.
static bool isPossibleSelfRead(Instruction *Inst,
                               const MemoryLocation &InstStoreLoc,
                               Instruction *DepWrite,
                               const TargetLibraryInfo &TLI,
                               AliasAnalysis &AA) {
  // Self reads can only happen for instructions that read memory.  Get the
  // location read.
  MemoryLocation InstReadLoc = getLocForRead(Inst, TLI);
  if (!InstReadLoc.Ptr) return false;  // Not a reading instruction.

  // If the read and written loc obviously don't alias, it isn't a read.
  if (AA.isNoAlias(InstReadLoc, InstStoreLoc)) return false;

  // Okay, 'Inst' may copy over itself.  However, we can still remove the
  // DepWrite instruction if we can prove that it reads from the same location
  // as Inst.  This handles useful cases like:
  //   memcpy(A <- B)
  //   memcpy(A <- B)
  // Here we don't know if A/B may alias, but we do know that B/B are must
  // aliases, so removing the first memcpy is safe (assuming it writes <= #
  // bytes as the second one).
  MemoryLocation DepReadLoc = getLocForRead(DepWrite, TLI);

  if (DepReadLoc.Ptr && AA.isMustAlias(InstReadLoc.Ptr, DepReadLoc.Ptr))
    return false;

  // If DepWrite doesn't read memory or if we can't prove it is a must alias,
  // then it can't be considered dead.
  return true;
}


/// Returns true if the memory which is accessed by the second instruction is
/// not modified between the first and the second instruction.
/// Precondition: Second instruction must be dominated by the first
/// instruction.
static bool memoryIsNotModifiedBetween(Instruction *FirstI,
                                       Instruction *SecondI,
                                       AliasAnalysis *AA) {
  SmallVector<BasicBlock *, 16> WorkList;
  SmallPtrSet<BasicBlock *, 8> Visited;
  BasicBlock::iterator FirstBBI(FirstI);
  ++FirstBBI;
  BasicBlock::iterator SecondBBI(SecondI);
  BasicBlock *FirstBB = FirstI->getParent();
  BasicBlock *SecondBB = SecondI->getParent();
  MemoryLocation MemLoc = MemoryLocation::get(SecondI);

  // Start checking the block containing SecondI.
  WorkList.push_back(SecondBB);
  bool isFirstBlock = true;

  // Check all blocks going backward until we reach FirstI's block.
  while (!WorkList.empty()) {
    BasicBlock *B = WorkList.pop_back_val();

    // Ignore instructions before FirstI if this is FirstBB.
    BasicBlock::iterator BI = (B == FirstBB ? FirstBBI : B->begin());

    BasicBlock::iterator EI;
    if (isFirstBlock) {
      // Ignore instructions after SecondI if this is the first visit of
      // SecondBB.
      assert(B == SecondBB && "first block is not the store block");
      EI = SecondBBI;
      isFirstBlock = false;
    } else {
      // It's not SecondBB or (in case of a loop) the second visit of SecondBB.
      // In this case we also have to look at instructions after SecondI.
      EI = B->end();
    }
    for (; BI != EI; ++BI) {
      Instruction *I = &*BI;
      if (I->mayWriteToMemory() && I != SecondI) {
        auto Res = AA->getModRefInfo(I, MemLoc);
        if (Res != MRI_NoModRef)
          return false;
      }
    }
    if (B != FirstBB) {
      assert(B != &FirstBB->getParent()->getEntryBlock() &&
             "Should not hit the entry block because SecondI must be "
             "dominated by FirstI");
      for (auto PredI = pred_begin(B), PE = pred_end(B); PredI != PE; ++PredI) {
        if (!Visited.insert(*PredI).second)
          continue;
        WorkList.push_back(*PredI);
      }
    }
  }
  return true;
}
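
// Illustrative example (hypothetical IR): this predicate is what lets the
// store-of-load elimination below fire.
//
//   %v = load i32, i32* %p
//   call void @f()                ; must not write *%p
//   store i32 %v, i32* %p         ; removable only if *%p is unmodified
//
// (@f is a hypothetical function.)  If any instruction between the two may
// modify the location, this returns false and the store is kept.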

/// Find all blocks that will unconditionally lead to the block BB and append
/// them to Blocks.
static void findUnconditionalPreds(SmallVectorImpl<BasicBlock *> &Blocks,
                                   BasicBlock *BB, DominatorTree *DT) {
  for (pred_iterator I = pred_begin(BB), E = pred_end(BB); I != E; ++I) {
    BasicBlock *Pred = *I;
    if (Pred == BB) continue;
    TerminatorInst *PredTI = Pred->getTerminator();
    if (PredTI->getNumSuccessors() != 1)
      continue;

    if (DT->isReachableFromEntry(Pred))
      Blocks.push_back(Pred);
  }
}

/// Handle frees of entire structures whose dependency is a store
/// to a field of that structure.
static bool handleFree(CallInst *F, AliasAnalysis *AA,
                       MemoryDependenceResults *MD, DominatorTree *DT,
                       const TargetLibraryInfo *TLI) {
  bool MadeChange = false;

  MemoryLocation Loc = MemoryLocation(F->getOperand(0));
  SmallVector<BasicBlock *, 16> Blocks;
  Blocks.push_back(F->getParent());
  const DataLayout &DL = F->getModule()->getDataLayout();

  while (!Blocks.empty()) {
    BasicBlock *BB = Blocks.pop_back_val();
    Instruction *InstPt = BB->getTerminator();
    if (BB == F->getParent()) InstPt = F;

    MemDepResult Dep =
        MD->getPointerDependencyFrom(Loc, false, InstPt->getIterator(), BB);
    while (Dep.isDef() || Dep.isClobber()) {
      Instruction *Dependency = Dep.getInst();
      if (!hasMemoryWrite(Dependency, *TLI) || !isRemovable(Dependency))
        break;

      Value *DepPointer =
          GetUnderlyingObject(getStoredPointerOperand(Dependency), DL);

      // Check for aliasing.
      if (!AA->isMustAlias(F->getArgOperand(0), DepPointer))
        break;

      auto Next = ++Dependency->getIterator();

      // DCE instructions only used to calculate that store.
      deleteDeadInstruction(Dependency, *MD, *TLI);
      ++NumFastStores;
      MadeChange = true;

      // Inst's old Dependency is now deleted.  Compute the next dependency,
      // which may also be dead, as in
      //    s[0] = 0;
      //    s[1] = 0; // This has just been deleted.
      //    free(s);
      Dep = MD->getPointerDependencyFrom(Loc, false, Next, BB);
    }

    if (Dep.isNonLocal())
      findUnconditionalPreds(Blocks, BB, DT);
  }

  return MadeChange;
}

/// Check to see if the specified location may alias any of the stack objects in
/// the DeadStackObjects set.  If so, they become live because the location is
/// being loaded.
static void removeAccessedObjects(const MemoryLocation &LoadedLoc,
                                  SmallSetVector<Value *, 16> &DeadStackObjects,
                                  const DataLayout &DL, AliasAnalysis *AA,
                                  const TargetLibraryInfo *TLI) {
  const Value *UnderlyingPointer = GetUnderlyingObject(LoadedLoc.Ptr, DL);

  // A constant can't be in the dead pointer set.
  if (isa<Constant>(UnderlyingPointer))
    return;

  // If the kill pointer can be easily reduced to an alloca, don't bother doing
  // extraneous AA queries.
  if (isa<AllocaInst>(UnderlyingPointer) || isa<Argument>(UnderlyingPointer)) {
    DeadStackObjects.remove(const_cast<Value*>(UnderlyingPointer));
    return;
  }

  // Remove objects that could alias LoadedLoc.
  DeadStackObjects.remove_if([&](Value *I) {
    // See if the loaded location could alias the stack location.
    MemoryLocation StackLoc(I, getPointerSize(I, DL, *TLI));
    return !AA->isNoAlias(StackLoc, LoadedLoc);
  });
}

/// Remove dead stores to stack-allocated locations in the function end block.
/// Ex:
///   %A = alloca i32
///   ...
///   store i32 1, i32* %A
///   ret void
static bool handleEndBlock(BasicBlock &BB, AliasAnalysis *AA,
                           MemoryDependenceResults *MD,
                           const TargetLibraryInfo *TLI) {
  bool MadeChange = false;

  // Keep track of all of the stack objects that are dead at the end of the
  // function.
  SmallSetVector<Value*, 16> DeadStackObjects;

  // Find all of the alloca'd pointers in the entry block.
  BasicBlock &Entry = BB.getParent()->front();
  for (Instruction &I : Entry) {
    if (isa<AllocaInst>(&I))
      DeadStackObjects.insert(&I);

    // Okay, so these are dead heap objects, but if the pointer never escapes
    // then it's leaked by this function anyways.
    else if (isAllocLikeFn(&I, TLI) && !PointerMayBeCaptured(&I, true, true))
      DeadStackObjects.insert(&I);
  }

  // Treat byval or inalloca arguments the same, stores to them are dead at the
  // end of the function.
  for (Argument &AI : BB.getParent()->args())
    if (AI.hasByValOrInAllocaAttr())
      DeadStackObjects.insert(&AI);

  const DataLayout &DL = BB.getModule()->getDataLayout();

  // Scan the basic block backwards.
  for (BasicBlock::iterator BBI = BB.end(); BBI != BB.begin(); ){
    --BBI;

    // If we find a store, check to see if it points into a dead stack value.
    if (hasMemoryWrite(&*BBI, *TLI) && isRemovable(&*BBI)) {
      // See through pointer-to-pointer bitcasts.
      SmallVector<Value *, 4> Pointers;
      GetUnderlyingObjects(getStoredPointerOperand(&*BBI), Pointers, DL);

      // Stores to stack values are valid candidates for removal.
      bool AllDead = true;
      for (Value *Pointer : Pointers)
        if (!DeadStackObjects.count(Pointer)) {
          AllDead = false;
          break;
        }

      if (AllDead) {
        Instruction *Dead = &*BBI++;

        DEBUG(dbgs() << "DSE: Dead Store at End of Block:\n  DEAD: "
                     << *Dead << "\n  Objects: ";
              for (SmallVectorImpl<Value *>::iterator I = Pointers.begin(),
                   E = Pointers.end(); I != E; ++I) {
                dbgs() << **I;
                if (std::next(I) != E)
                  dbgs() << ", ";
              }
              dbgs() << '\n');

        // DCE instructions only used to calculate that store.
        deleteDeadInstruction(Dead, *MD, *TLI, &DeadStackObjects);
        ++NumFastStores;
        MadeChange = true;
        continue;
      }
    }

    // Remove any dead non-memory-mutating instructions.
    if (isInstructionTriviallyDead(&*BBI, TLI)) {
      Instruction *Inst = &*BBI++;
      deleteDeadInstruction(Inst, *MD, *TLI, &DeadStackObjects);
      ++NumFastOther;
      MadeChange = true;
      continue;
    }

    if (isa<AllocaInst>(BBI)) {
      // Remove allocas from the list of dead stack objects; there can't be
      // any references before the definition.
      DeadStackObjects.remove(&*BBI);
      continue;
    }

    if (auto CS = CallSite(&*BBI)) {
      // Remove allocation function calls from the list of dead stack objects;
      // there can't be any references before the definition.
      if (isAllocLikeFn(&*BBI, TLI))
        DeadStackObjects.remove(&*BBI);

      // If this call does not access memory, it can't be loading any of our
      // pointers.
      if (AA->doesNotAccessMemory(CS))
        continue;

      // If the call might load from any of our allocas, then any store above
      // the call is live.
      DeadStackObjects.remove_if([&](Value *I) {
        // See if the call site touches the value.
        ModRefInfo A = AA->getModRefInfo(CS, I, getPointerSize(I, DL, *TLI));

        return A == MRI_ModRef || A == MRI_Ref;
      });

      // If all of the allocas were clobbered by the call then we're not going
      // to find anything else to process.
      if (DeadStackObjects.empty())
        break;

      continue;
    }

    MemoryLocation LoadedLoc;

    // If we encounter a use of the pointer, it is no longer considered dead.
    if (LoadInst *L = dyn_cast<LoadInst>(BBI)) {
      if (!L->isUnordered()) // Be conservative with atomic/volatile load
        break;
      LoadedLoc = MemoryLocation::get(L);
    } else if (VAArgInst *V = dyn_cast<VAArgInst>(BBI)) {
      LoadedLoc = MemoryLocation::get(V);
    } else if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(BBI)) {
      LoadedLoc = MemoryLocation::getForSource(MTI);
    } else if (!BBI->mayReadFromMemory()) {
      // Instruction doesn't read memory.  Note that stores that weren't removed
      // above will hit this case.
      continue;
    } else {
      // Unknown inst; assume it clobbers everything.
      break;
    }

    // Remove any allocas from the DeadPointer set that are loaded, as this
    // makes any stores above the access live.
    removeAccessedObjects(LoadedLoc, DeadStackObjects, DL, AA, TLI);

    // If all of the allocas were clobbered by the access then we're not going
    // to find anything else to process.
    if (DeadStackObjects.empty())
      break;
  }

  return MadeChange;
}

static bool eliminateDeadStores(BasicBlock &BB, AliasAnalysis *AA,
                                MemoryDependenceResults *MD, DominatorTree *DT,
                                const TargetLibraryInfo *TLI) {
  const DataLayout &DL = BB.getModule()->getDataLayout();
  bool MadeChange = false;

  // A map of interval maps representing partially-overwritten value parts.
  InstOverlapIntervalsTy IOL;

  // Do a top-down walk on the BB.
  for (BasicBlock::iterator BBI = BB.begin(), BBE = BB.end(); BBI != BBE; ) {
    Instruction *Inst = &*BBI++;

    // Handle 'free' calls specially.
    if (CallInst *F = isFreeCall(Inst, TLI)) {
      MadeChange |= handleFree(F, AA, MD, DT, TLI);
      continue;
    }

    // If we find something that writes memory, get its memory dependence.
    if (!hasMemoryWrite(Inst, *TLI))
      continue;

    // If we're storing the same value back to a pointer that we just
    // loaded from, then the store can be removed.
    if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {

      auto RemoveDeadInstAndUpdateBBI = [&](Instruction *DeadInst) {
        // deleteDeadInstruction can delete the current instruction.  Save BBI
        // in case we need it.
        WeakVH NextInst(&*BBI);

        deleteDeadInstruction(DeadInst, *MD, *TLI);

        if (!NextInst)  // Next instruction deleted.
          BBI = BB.begin();
        else if (BBI != BB.begin())  // Revisit this instruction if possible.
          --BBI;
        ++NumRedundantStores;
        MadeChange = true;
      };

      if (LoadInst *DepLoad = dyn_cast<LoadInst>(SI->getValueOperand())) {
        if (SI->getPointerOperand() == DepLoad->getPointerOperand() &&
            isRemovable(SI) &&
            memoryIsNotModifiedBetween(DepLoad, SI, AA)) {

          DEBUG(dbgs() << "DSE: Remove Store Of Load from same pointer:\n  "
                       << "LOAD: " << *DepLoad << "\n  STORE: " << *SI << '\n');

          RemoveDeadInstAndUpdateBBI(SI);
          continue;
        }
      }

      // Remove null stores into the calloc'ed objects.
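      // (calloc returns zero-initialized memory, so e.g. a "store i64 0" to a
      // pointer derived from a calloc-like call is redundant, provided the
      // memoryIsNotModifiedBetween check below shows nothing wrote to it in
      // the meantime.)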
      Constant *StoredConstant = dyn_cast<Constant>(SI->getValueOperand());

      if (StoredConstant && StoredConstant->isNullValue() &&
          isRemovable(SI)) {
        Instruction *UnderlyingPointer = dyn_cast<Instruction>(
            GetUnderlyingObject(SI->getPointerOperand(), DL));

        if (UnderlyingPointer && isCallocLikeFn(UnderlyingPointer, TLI) &&
            memoryIsNotModifiedBetween(UnderlyingPointer, SI, AA)) {
          DEBUG(dbgs()
                << "DSE: Remove null store to the calloc'ed object:\n  DEAD: "
                << *Inst << "\n  OBJECT: " << *UnderlyingPointer << '\n');

          RemoveDeadInstAndUpdateBBI(SI);
          continue;
        }
      }
    }

    MemDepResult InstDep = MD->getDependency(Inst);

    // Ignore any store where we can't find a local dependence.
    // FIXME: cross-block DSE would be fun. :)
    if (!InstDep.isDef() && !InstDep.isClobber())
      continue;

    // Figure out what location is being stored to.
    MemoryLocation Loc = getLocForWrite(Inst, *AA);

    // If we didn't get a useful location, fail.
    if (!Loc.Ptr)
      continue;

    while (InstDep.isDef() || InstDep.isClobber()) {
      // Get the memory clobbered by the instruction we depend on.  MemDep will
      // skip any instructions that 'Loc' clearly doesn't interact with.  If we
      // end up depending on a may- or must-aliased load, then we can't optimize
      // away the store and we bail out.  However, if we depend on something
      // that overwrites the memory location we *can* potentially optimize it.
      //
      // Find out what memory location the dependent instruction stores.
      Instruction *DepWrite = InstDep.getInst();
      MemoryLocation DepLoc = getLocForWrite(DepWrite, *AA);
      // If we didn't get a useful location, bail out.
      if (!DepLoc.Ptr)
        break;

      // If we find a write that is a) removable (i.e., non-volatile), b) is
      // completely obliterated by the store to 'Loc', and c) which we know that
      // 'Inst' doesn't load from, then we can remove it.
      if (isRemovable(DepWrite) &&
          !isPossibleSelfRead(Inst, Loc, DepWrite, *TLI, *AA)) {
        int64_t InstWriteOffset, DepWriteOffset;
        OverwriteResult OR =
            isOverwrite(Loc, DepLoc, DL, *TLI, DepWriteOffset, InstWriteOffset,
                        DepWrite, IOL);
        if (OR == OverwriteComplete) {
          DEBUG(dbgs() << "DSE: Remove Dead Store:\n  DEAD: "
                       << *DepWrite << "\n  KILLER: " << *Inst << '\n');

          // Delete the store and now-dead instructions that feed it.
          deleteDeadInstruction(DepWrite, *MD, *TLI);
          ++NumFastStores;
          MadeChange = true;

          // deleteDeadInstruction can delete the current instruction in loop
          // cases, reset BBI.
          BBI = Inst->getIterator();
          auto BBBegin = BB.begin();
          while (BBI != BBBegin && isa<DbgInfoIntrinsic>(*(--BBI)))
            ;
          break;
        } else if ((OR == OverwriteEnd && isShortenableAtTheEnd(DepWrite)) ||
                   ((OR == OverwriteBegin &&
                     isShortenableAtTheBeginning(DepWrite)))) {
          // TODO: Base this on the target vector size so that if the earlier
          // store was too small to get vector writes anyway, then it's likely
          // a good idea to shorten it.
          // Power-of-2 vector writes are probably always a bad idea to
          // optimize, as any store/memset/memcpy is likely using vector
          // instructions, so shortening it to a non-vector size is likely to
          // be slower.
          MemIntrinsic *DepIntrinsic = cast<MemIntrinsic>(DepWrite);
          unsigned DepWriteAlign = DepIntrinsic->getAlignment();
          bool IsOverwriteEnd = (OR == OverwriteEnd);
          if (!IsOverwriteEnd)
            InstWriteOffset = int64_t(InstWriteOffset + Loc.Size);

          if ((llvm::isPowerOf2_64(InstWriteOffset) &&
               DepWriteAlign <= InstWriteOffset) ||
              ((DepWriteAlign != 0) && InstWriteOffset % DepWriteAlign == 0)) {

            DEBUG(dbgs() << "DSE: Remove Dead Store:\n  OW "
                         << (IsOverwriteEnd ? "END" : "BEGIN") << ": "
                         << *DepWrite << "\n  KILLER (offset "
                         << InstWriteOffset << ", " << DepLoc.Size << ")"
                         << *Inst << '\n');

            int64_t NewLength =
                IsOverwriteEnd
                    ? InstWriteOffset - DepWriteOffset
                    : DepLoc.Size - (InstWriteOffset - DepWriteOffset);

            Value *DepWriteLength = DepIntrinsic->getLength();
            Value *TrimmedLength =
                ConstantInt::get(DepWriteLength->getType(), NewLength);
            DepIntrinsic->setLength(TrimmedLength);

            if (!IsOverwriteEnd) {
              int64_t OffsetMoved = (InstWriteOffset - DepWriteOffset);
              Value *Indices[1] = {
                  ConstantInt::get(DepWriteLength->getType(), OffsetMoved)};
              GetElementPtrInst *NewDestGEP = GetElementPtrInst::CreateInBounds(
                  DepIntrinsic->getRawDest(), Indices, "", DepWrite);
              DepIntrinsic->setDest(NewDestGEP);
            }
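
            // Illustrative example: for OverwriteEnd, "memset(p, 0, 32)"
            // followed by a store over its last 8 bytes is trimmed to
            // "memset(p, 0, 24)"; for OverwriteBegin, a store over its first
            // 8 bytes instead yields "memset(p+8, 0, 24)" via the GEP built
            // above.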
            MadeChange = true;
          }
        }
      }

      // If this is a may-aliased store that is clobbering the store value, we
      // can keep searching past it for another must-aliased pointer that stores
      // to the same location.  For example, in:
      //   store -> P
      //   store -> Q
      //   store -> P
      // we can remove the first store to P even though we don't know if P and Q
      // alias.
      if (DepWrite == &BB.front()) break;

      // Can't look past this instruction if it might read 'Loc'.
      if (AA->getModRefInfo(DepWrite, Loc) & MRI_Ref)
        break;

      InstDep = MD->getPointerDependencyFrom(Loc, false,
                                             DepWrite->getIterator(), &BB);
    }
  }

  // If this block ends in a return, unwind, or unreachable, all allocas are
  // dead at its end, which means stores to them are also dead.
  if (BB.getTerminator()->getNumSuccessors() == 0)
    MadeChange |= handleEndBlock(BB, AA, MD, TLI);

  return MadeChange;
}

static bool eliminateDeadStores(Function &F, AliasAnalysis *AA,
                                MemoryDependenceResults *MD, DominatorTree *DT,
                                const TargetLibraryInfo *TLI) {
  bool MadeChange = false;
  for (BasicBlock &BB : F)
    // Only check non-dead blocks.  Dead blocks may have strange pointer
    // cycles that will confuse alias analysis.
    if (DT->isReachableFromEntry(&BB))
      MadeChange |= eliminateDeadStores(BB, AA, MD, DT, TLI);
  return MadeChange;
}

//===----------------------------------------------------------------------===//
// DSE Pass
//===----------------------------------------------------------------------===//
PreservedAnalyses DSEPass::run(Function &F, FunctionAnalysisManager &AM) {
  AliasAnalysis *AA = &AM.getResult<AAManager>(F);
  DominatorTree *DT = &AM.getResult<DominatorTreeAnalysis>(F);
  MemoryDependenceResults *MD = &AM.getResult<MemoryDependenceAnalysis>(F);
  const TargetLibraryInfo *TLI = &AM.getResult<TargetLibraryAnalysis>(F);

  if (!eliminateDeadStores(F, AA, MD, DT, TLI))
    return PreservedAnalyses::all();
  PreservedAnalyses PA;
  PA.preserve<DominatorTreeAnalysis>();
  PA.preserve<GlobalsAA>();
  PA.preserve<MemoryDependenceAnalysis>();
  return PA;
}

/// A legacy pass for the legacy pass manager that wraps \c DSEPass.
class DSELegacyPass : public FunctionPass {
public:
  DSELegacyPass() : FunctionPass(ID) {
    initializeDSELegacyPassPass(*PassRegistry::getPassRegistry());
  }

  bool runOnFunction(Function &F) override {
    if (skipFunction(F))
      return false;

    DominatorTree *DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
    AliasAnalysis *AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
    MemoryDependenceResults *MD =
        &getAnalysis<MemoryDependenceWrapperPass>().getMemDep();
    const TargetLibraryInfo *TLI =
        &getAnalysis<TargetLibraryInfoWrapperPass>().getTLI();

    return eliminateDeadStores(F, AA, MD, DT, TLI);
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<AAResultsWrapperPass>();
    AU.addRequired<MemoryDependenceWrapperPass>();
    AU.addRequired<TargetLibraryInfoWrapperPass>();
    AU.addPreserved<DominatorTreeWrapperPass>();
    AU.addPreserved<GlobalsAAWrapperPass>();
    AU.addPreserved<MemoryDependenceWrapperPass>();
  }

  static char ID; // Pass identification, replacement for typeid
};

char DSELegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(DSELegacyPass, "dse", "Dead Store Elimination", false,
                      false)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(GlobalsAAWrapperPass)
INITIALIZE_PASS_DEPENDENCY(MemoryDependenceWrapperPass)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(DSELegacyPass, "dse", "Dead Store Elimination", false,
                    false)

FunctionPass *llvm::createDeadStoreEliminationPass() {
  return new DSELegacyPass();
}