//===- GVN.cpp - Eliminate redundant values and loads ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs global value numbering to eliminate fully redundant
// instructions.  It also performs simple dead load elimination.
//
// Note that this pass does the value numbering itself; it does not use the
// ValueNumbering analysis passes.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "gvn"
#include "llvm/Transforms/Scalar.h"
#include "llvm/BasicBlock.h"
#include "llvm/Constants.h"
#include "llvm/DerivedTypes.h"
#include "llvm/GlobalVariable.h"
#include "llvm/Function.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/LLVMContext.h"
#include "llvm/Operator.h"
#include "llvm/Value.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/Loads.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Analysis/PHITransAddr.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/GetElementPtrTypeIterator.h"
#include "llvm/Support/IRBuilder.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/SSAUpdater.h"
using namespace llvm;

STATISTIC(NumGVNInstr,  "Number of instructions deleted");
STATISTIC(NumGVNLoad,   "Number of loads deleted");
STATISTIC(NumGVNPRE,    "Number of instructions PRE'd");
STATISTIC(NumGVNBlocks, "Number of blocks merged");
STATISTIC(NumPRELoad,   "Number of loads PRE'd");

static cl::opt<bool> EnablePRE("enable-pre",
                               cl::init(true), cl::Hidden);
static cl::opt<bool> EnableLoadPRE("enable-load-pre", cl::init(true));
static cl::opt<bool> EnableFullLoadPRE("enable-full-load-pre", cl::init(false));
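// These cl::opt flags are registered command-line options, so they can be
// toggled when the pass is run standalone, e.g. (illustrative invocation):
//   opt -gvn -enable-load-pre=false foo.bc -o foo.opt.bc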

//===----------------------------------------------------------------------===//
//                         ValueTable Class
//===----------------------------------------------------------------------===//

/// This class holds the mapping between values and value numbers.  It is used
/// as an efficient mechanism to determine the expression-wise equivalence of
/// two values.
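/// For example (illustrative IR): "%a = add i32 %x, %y" and
/// "%b = add i32 %x, %y" produce identical Expressions (same opcode, type, and
/// operand value numbers), so both instructions receive the same value number.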
namespace {
  struct Expression {
    enum ExpressionOpcode {
      ADD = Instruction::Add,
      FADD = Instruction::FAdd,
      SUB = Instruction::Sub,
      FSUB = Instruction::FSub,
      MUL = Instruction::Mul,
      FMUL = Instruction::FMul,
      UDIV = Instruction::UDiv,
      SDIV = Instruction::SDiv,
      FDIV = Instruction::FDiv,
      UREM = Instruction::URem,
      SREM = Instruction::SRem,
      FREM = Instruction::FRem,
      SHL = Instruction::Shl,
      LSHR = Instruction::LShr,
      ASHR = Instruction::AShr,
      AND = Instruction::And,
      OR = Instruction::Or,
      XOR = Instruction::Xor,
      TRUNC = Instruction::Trunc,
      ZEXT = Instruction::ZExt,
      SEXT = Instruction::SExt,
      FPTOUI = Instruction::FPToUI,
      FPTOSI = Instruction::FPToSI,
      UITOFP = Instruction::UIToFP,
      SITOFP = Instruction::SIToFP,
      FPTRUNC = Instruction::FPTrunc,
      FPEXT = Instruction::FPExt,
      PTRTOINT = Instruction::PtrToInt,
      INTTOPTR = Instruction::IntToPtr,
      BITCAST = Instruction::BitCast,
      ICMPEQ, ICMPNE, ICMPUGT, ICMPUGE, ICMPULT, ICMPULE,
      ICMPSGT, ICMPSGE, ICMPSLT, ICMPSLE, FCMPOEQ,
      FCMPOGT, FCMPOGE, FCMPOLT, FCMPOLE, FCMPONE,
      FCMPORD, FCMPUNO, FCMPUEQ, FCMPUGT, FCMPUGE,
      FCMPULT, FCMPULE, FCMPUNE, EXTRACT, INSERT,
      SHUFFLE, SELECT, GEP, CALL, CONSTANT,
      INSERTVALUE, EXTRACTVALUE, EMPTY, TOMBSTONE };

    ExpressionOpcode opcode;
    const Type* type;
    SmallVector<uint32_t, 4> varargs;
    Value *function;

    Expression() { }
    Expression(ExpressionOpcode o) : opcode(o) { }

    bool operator==(const Expression &other) const {
      if (opcode != other.opcode)
        return false;
      else if (opcode == EMPTY || opcode == TOMBSTONE)
        return true;
      else if (type != other.type)
        return false;
      else if (function != other.function)
        return false;
      else {
        if (varargs.size() != other.varargs.size())
          return false;

        for (size_t i = 0; i < varargs.size(); ++i)
          if (varargs[i] != other.varargs[i])
            return false;

        return true;
      }
    }

    /*bool operator!=(const Expression &other) const {
      return !(*this == other);
    }*/
  };

  class ValueTable {
    private:
      DenseMap<Value*, uint32_t> valueNumbering;
      DenseMap<Expression, uint32_t> expressionNumbering;
      AliasAnalysis* AA;
      MemoryDependenceAnalysis* MD;
      DominatorTree* DT;

      uint32_t nextValueNumber;

      Expression::ExpressionOpcode getOpcode(CmpInst* C);
      Expression create_expression(BinaryOperator* BO);
      Expression create_expression(CmpInst* C);
      Expression create_expression(ShuffleVectorInst* V);
      Expression create_expression(ExtractElementInst* C);
      Expression create_expression(InsertElementInst* V);
      Expression create_expression(SelectInst* V);
      Expression create_expression(CastInst* C);
      Expression create_expression(GetElementPtrInst* G);
      Expression create_expression(CallInst* C);
      Expression create_expression(ExtractValueInst* C);
      Expression create_expression(InsertValueInst* C);

      uint32_t lookup_or_add_call(CallInst* C);
    public:
      ValueTable() : nextValueNumber(1) { }
      uint32_t lookup_or_add(Value *V);
      uint32_t lookup(Value *V) const;
      void add(Value *V, uint32_t num);
      void clear();
      void erase(Value *v);
      void setAliasAnalysis(AliasAnalysis* A) { AA = A; }
      AliasAnalysis *getAliasAnalysis() const { return AA; }
      void setMemDep(MemoryDependenceAnalysis* M) { MD = M; }
      void setDomTree(DominatorTree* D) { DT = D; }
      uint32_t getNextUnusedValueNumber() { return nextValueNumber; }
      void verifyRemoved(const Value *) const;
  };
}

namespace llvm {
template <> struct DenseMapInfo<Expression> {
  static inline Expression getEmptyKey() {
    return Expression(Expression::EMPTY);
  }

  static inline Expression getTombstoneKey() {
    return Expression(Expression::TOMBSTONE);
  }

  static unsigned getHashValue(const Expression e) {
    unsigned hash = e.opcode;

    hash = ((unsigned)((uintptr_t)e.type >> 4) ^
            (unsigned)((uintptr_t)e.type >> 9));

    for (SmallVector<uint32_t, 4>::const_iterator I = e.varargs.begin(),
         E = e.varargs.end(); I != E; ++I)
      hash = *I + hash * 37;

    hash = ((unsigned)((uintptr_t)e.function >> 4) ^
            (unsigned)((uintptr_t)e.function >> 9)) +
           hash * 37;

    return hash;
  }
  static bool isEqual(const Expression &LHS, const Expression &RHS) {
    return LHS == RHS;
  }
};

template <>
struct isPodLike<Expression> { static const bool value = true; };

}

//===----------------------------------------------------------------------===//
//                     ValueTable Internal Functions
//===----------------------------------------------------------------------===//

Expression::ExpressionOpcode ValueTable::getOpcode(CmpInst* C) {
  if (isa<ICmpInst>(C)) {
    switch (C->getPredicate()) {
    default:  // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case ICmpInst::ICMP_EQ:  return Expression::ICMPEQ;
    case ICmpInst::ICMP_NE:  return Expression::ICMPNE;
    case ICmpInst::ICMP_UGT: return Expression::ICMPUGT;
    case ICmpInst::ICMP_UGE: return Expression::ICMPUGE;
    case ICmpInst::ICMP_ULT: return Expression::ICMPULT;
    case ICmpInst::ICMP_ULE: return Expression::ICMPULE;
    case ICmpInst::ICMP_SGT: return Expression::ICMPSGT;
    case ICmpInst::ICMP_SGE: return Expression::ICMPSGE;
    case ICmpInst::ICMP_SLT: return Expression::ICMPSLT;
    case ICmpInst::ICMP_SLE: return Expression::ICMPSLE;
    }
  } else {
    switch (C->getPredicate()) {
    default: // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case FCmpInst::FCMP_OEQ: return Expression::FCMPOEQ;
    case FCmpInst::FCMP_OGT: return Expression::FCMPOGT;
    case FCmpInst::FCMP_OGE: return Expression::FCMPOGE;
    case FCmpInst::FCMP_OLT: return Expression::FCMPOLT;
    case FCmpInst::FCMP_OLE: return Expression::FCMPOLE;
    case FCmpInst::FCMP_ONE: return Expression::FCMPONE;
    case FCmpInst::FCMP_ORD: return Expression::FCMPORD;
    case FCmpInst::FCMP_UNO: return Expression::FCMPUNO;
    case FCmpInst::FCMP_UEQ: return Expression::FCMPUEQ;
    case FCmpInst::FCMP_UGT: return Expression::FCMPUGT;
    case FCmpInst::FCMP_UGE: return Expression::FCMPUGE;
    case FCmpInst::FCMP_ULT: return Expression::FCMPULT;
    case FCmpInst::FCMP_ULE: return Expression::FCMPULE;
    case FCmpInst::FCMP_UNE: return Expression::FCMPUNE;
    }
  }
}

Expression ValueTable::create_expression(CallInst* C) {
  Expression e;

  e.type = C->getType();
  e.function = C->getCalledFunction();
  e.opcode = Expression::CALL;

  CallSite CS(C);
  for (CallInst::op_iterator I = CS.arg_begin(), E = CS.arg_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(BinaryOperator* BO) {
  Expression e;
  e.varargs.push_back(lookup_or_add(BO->getOperand(0)));
  e.varargs.push_back(lookup_or_add(BO->getOperand(1)));
  e.function = 0;
  e.type = BO->getType();
  e.opcode = static_cast<Expression::ExpressionOpcode>(BO->getOpcode());

  return e;
}

Expression ValueTable::create_expression(CmpInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.varargs.push_back(lookup_or_add(C->getOperand(1)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = getOpcode(C);

  return e;
}

Expression ValueTable::create_expression(CastInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = static_cast<Expression::ExpressionOpcode>(C->getOpcode());

  return e;
}

Expression ValueTable::create_expression(ShuffleVectorInst* S) {
  Expression e;

  e.varargs.push_back(lookup_or_add(S->getOperand(0)));
  e.varargs.push_back(lookup_or_add(S->getOperand(1)));
  e.varargs.push_back(lookup_or_add(S->getOperand(2)));
  e.function = 0;
  e.type = S->getType();
  e.opcode = Expression::SHUFFLE;

  return e;
}

Expression ValueTable::create_expression(ExtractElementInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getOperand(0)));
  e.varargs.push_back(lookup_or_add(E->getOperand(1)));
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACT;

  return e;
}

Expression ValueTable::create_expression(InsertElementInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getOperand(0)));
  e.varargs.push_back(lookup_or_add(I->getOperand(1)));
  e.varargs.push_back(lookup_or_add(I->getOperand(2)));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::INSERT;

  return e;
}

Expression ValueTable::create_expression(SelectInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getCondition()));
  e.varargs.push_back(lookup_or_add(I->getTrueValue()));
  e.varargs.push_back(lookup_or_add(I->getFalseValue()));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::SELECT;

  return e;
}

Expression ValueTable::create_expression(GetElementPtrInst* G) {
  Expression e;

  e.varargs.push_back(lookup_or_add(G->getPointerOperand()));
  e.function = 0;
  e.type = G->getType();
  e.opcode = Expression::GEP;

  for (GetElementPtrInst::op_iterator I = G->idx_begin(), E = G->idx_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(ExtractValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  for (ExtractValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACTVALUE;

  return e;
}

Expression ValueTable::create_expression(InsertValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  e.varargs.push_back(lookup_or_add(E->getInsertedValueOperand()));
  for (InsertValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::INSERTVALUE;

  return e;
}

//===----------------------------------------------------------------------===//
//                     ValueTable External Functions
//===----------------------------------------------------------------------===//

/// add - Insert a value into the table with a specified value number.
void ValueTable::add(Value *V, uint32_t num) {
  valueNumbering.insert(std::make_pair(V, num));
}

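/// lookup_or_add_call - Assign a value number to a call.  Calls that do not
/// access memory are numbered like ordinary expressions.  Read-only calls may
/// share the number of an identical call that provably dominates them (found
/// via MemoryDependenceAnalysis); calls that may write memory always get a
/// fresh number.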
uint32_t ValueTable::lookup_or_add_call(CallInst* C) {
  if (AA->doesNotAccessMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) e = nextValueNumber++;
    valueNumbering[C] = e;
    return e;
  } else if (AA->onlyReadsMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }
    if (!MD) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }

    MemDepResult local_dep = MD->getDependency(C);

    if (!local_dep.isDef() && !local_dep.isNonLocal()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (local_dep.isDef()) {
      CallInst* local_cdep = cast<CallInst>(local_dep.getInst());

      if (local_cdep->getNumArgOperands() != C->getNumArgOperands()) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }

      for (unsigned i = 0, e = C->getNumArgOperands(); i < e; ++i) {
        uint32_t c_vn = lookup_or_add(C->getArgOperand(i));
        uint32_t cd_vn = lookup_or_add(local_cdep->getArgOperand(i));
        if (c_vn != cd_vn) {
          valueNumbering[C] = nextValueNumber;
          return nextValueNumber++;
        }
      }

      uint32_t v = lookup_or_add(local_cdep);
      valueNumbering[C] = v;
      return v;
    }

    // Non-local case.
    const MemoryDependenceAnalysis::NonLocalDepInfo &deps =
      MD->getNonLocalCallDependency(CallSite(C));
    // FIXME: call/call dependencies for readonly calls should return def, not
    // clobber!  Move the checking logic to MemDep!
    CallInst* cdep = 0;

    // Check to see if we have a single dominating call instruction that is
    // identical to C.
    for (unsigned i = 0, e = deps.size(); i != e; ++i) {
      const NonLocalDepEntry *I = &deps[i];
      // Ignore non-local dependencies.
      if (I->getResult().isNonLocal())
        continue;

      // We don't handle non-dependencies.  If we already have a call, reject
      // instruction dependencies.
      if (I->getResult().isClobber() || cdep != 0) {
        cdep = 0;
        break;
      }

      CallInst *NonLocalDepCall = dyn_cast<CallInst>(I->getResult().getInst());
      // FIXME: All duplicated with non-local case.
      if (NonLocalDepCall && DT->properlyDominates(I->getBB(), C->getParent())){
        cdep = NonLocalDepCall;
        continue;
      }

      cdep = 0;
      break;
    }

    if (!cdep) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (cdep->getNumArgOperands() != C->getNumArgOperands()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }
    for (unsigned i = 0, e = C->getNumArgOperands(); i < e; ++i) {
      uint32_t c_vn = lookup_or_add(C->getArgOperand(i));
      uint32_t cd_vn = lookup_or_add(cdep->getArgOperand(i));
      if (c_vn != cd_vn) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }
    }

    uint32_t v = lookup_or_add(cdep);
    valueNumbering[C] = v;
    return v;

  } else {
    valueNumbering[C] = nextValueNumber;
    return nextValueNumber++;
  }
}

/// lookup_or_add - Returns the value number for the specified value, assigning
/// it a new number if it did not have one before.
uint32_t ValueTable::lookup_or_add(Value *V) {
  DenseMap<Value*, uint32_t>::iterator VI = valueNumbering.find(V);
  if (VI != valueNumbering.end())
    return VI->second;

  if (!isa<Instruction>(V)) {
    valueNumbering[V] = nextValueNumber;
    return nextValueNumber++;
  }

  Instruction* I = cast<Instruction>(V);
  Expression exp;
  switch (I->getOpcode()) {
    case Instruction::Call:
      return lookup_or_add_call(cast<CallInst>(I));
    case Instruction::Add:
    case Instruction::FAdd:
    case Instruction::Sub:
    case Instruction::FSub:
    case Instruction::Mul:
    case Instruction::FMul:
    case Instruction::UDiv:
    case Instruction::SDiv:
    case Instruction::FDiv:
    case Instruction::URem:
    case Instruction::SRem:
    case Instruction::FRem:
    case Instruction::Shl:
    case Instruction::LShr:
    case Instruction::AShr:
    case Instruction::And:
    case Instruction::Or :
    case Instruction::Xor:
      exp = create_expression(cast<BinaryOperator>(I));
      break;
    case Instruction::ICmp:
    case Instruction::FCmp:
      exp = create_expression(cast<CmpInst>(I));
      break;
    case Instruction::Trunc:
    case Instruction::ZExt:
    case Instruction::SExt:
    case Instruction::FPToUI:
    case Instruction::FPToSI:
    case Instruction::UIToFP:
    case Instruction::SIToFP:
    case Instruction::FPTrunc:
    case Instruction::FPExt:
    case Instruction::PtrToInt:
    case Instruction::IntToPtr:
    case Instruction::BitCast:
      exp = create_expression(cast<CastInst>(I));
      break;
    case Instruction::Select:
      exp = create_expression(cast<SelectInst>(I));
      break;
    case Instruction::ExtractElement:
      exp = create_expression(cast<ExtractElementInst>(I));
      break;
    case Instruction::InsertElement:
      exp = create_expression(cast<InsertElementInst>(I));
      break;
    case Instruction::ShuffleVector:
      exp = create_expression(cast<ShuffleVectorInst>(I));
      break;
    case Instruction::ExtractValue:
      exp = create_expression(cast<ExtractValueInst>(I));
      break;
    case Instruction::InsertValue:
      exp = create_expression(cast<InsertValueInst>(I));
      break;
    case Instruction::GetElementPtr:
      exp = create_expression(cast<GetElementPtrInst>(I));
      break;
    default:
      valueNumbering[V] = nextValueNumber;
      return nextValueNumber++;
  }

  uint32_t& e = expressionNumbering[exp];
  if (!e) e = nextValueNumber++;
  valueNumbering[V] = e;
  return e;
}

/// lookup - Returns the value number of the specified value. Fails if
/// the value has not yet been numbered.
uint32_t ValueTable::lookup(Value *V) const {
  DenseMap<Value*, uint32_t>::const_iterator VI = valueNumbering.find(V);
  assert(VI != valueNumbering.end() && "Value not numbered?");
  return VI->second;
}

/// clear - Remove all entries from the ValueTable
void ValueTable::clear() {
  valueNumbering.clear();
  expressionNumbering.clear();
  nextValueNumber = 1;
}

/// erase - Remove a value from the value numbering
void ValueTable::erase(Value *V) {
  valueNumbering.erase(V);
}

/// verifyRemoved - Verify that the value is removed from all internal data
/// structures.
void ValueTable::verifyRemoved(const Value *V) const {
  for (DenseMap<Value*, uint32_t>::const_iterator
         I = valueNumbering.begin(), E = valueNumbering.end(); I != E; ++I) {
    assert(I->first != V && "Inst still occurs in value numbering map!");
  }
}

//===----------------------------------------------------------------------===//
//                                GVN Pass
//===----------------------------------------------------------------------===//

namespace {
  struct ValueNumberScope {
    ValueNumberScope* parent;
    DenseMap<uint32_t, Value*> table;

    ValueNumberScope(ValueNumberScope* p) : parent(p) { }
  };
}

namespace {

  class GVN : public FunctionPass {
    bool runOnFunction(Function &F);
  public:
    static char ID; // Pass identification, replacement for typeid
    explicit GVN(bool noloads = false)
        : FunctionPass(ID), NoLoads(noloads), MD(0) { }

  private:
    bool NoLoads;
    MemoryDependenceAnalysis *MD;
    DominatorTree *DT;

    ValueTable VN;
    DenseMap<BasicBlock*, ValueNumberScope*> localAvail;

    // List of critical edges to be split between iterations.
    SmallVector<std::pair<TerminatorInst*, unsigned>, 4> toSplit;

    // This transformation requires dominator info.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<DominatorTree>();
      if (!NoLoads)
        AU.addRequired<MemoryDependenceAnalysis>();
      AU.addRequired<AliasAnalysis>();

      AU.addPreserved<DominatorTree>();
      AU.addPreserved<AliasAnalysis>();
    }

    // Helper functions
    // FIXME: eliminate or document these better
    bool processLoad(LoadInst* L,
                     SmallVectorImpl<Instruction*> &toErase);
    bool processInstruction(Instruction *I,
                            SmallVectorImpl<Instruction*> &toErase);
    bool processNonLocalLoad(LoadInst* L,
                             SmallVectorImpl<Instruction*> &toErase);
    bool processBlock(BasicBlock *BB);
    void dump(DenseMap<uint32_t, Value*>& d);
    bool iterateOnFunction(Function &F);
    Value *CollapsePhi(PHINode* p);
    bool performPRE(Function& F);
    Value *lookupNumber(BasicBlock *BB, uint32_t num);
    void cleanupGlobalSets();
    void verifyRemoved(const Instruction *I) const;
    bool splitCriticalEdges();
  };

  char GVN::ID = 0;
}

// createGVNPass - The public interface to this file...
FunctionPass *llvm::createGVNPass(bool NoLoads) {
  return new GVN(NoLoads);
}
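// Typical use is through a pass manager, e.g. (illustrative sketch, not taken
// from this file):
//   PassManager PM;
//   PM.add(createGVNPass(/*NoLoads=*/false));
//   PM.run(M);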

INITIALIZE_PASS(GVN, "gvn", "Global Value Numbering", false, false);

void GVN::dump(DenseMap<uint32_t, Value*>& d) {
  errs() << "{\n";
  for (DenseMap<uint32_t, Value*>::iterator I = d.begin(),
       E = d.end(); I != E; ++I) {
    errs() << I->first << "\n";
    I->second->dump();
  }
  errs() << "}\n";
}

static bool isSafeReplacement(PHINode* p, Instruction *inst) {
  if (!isa<PHINode>(inst))
    return true;

  for (Instruction::use_iterator UI = p->use_begin(), E = p->use_end();
       UI != E; ++UI)
    if (PHINode* use_phi = dyn_cast<PHINode>(*UI))
      if (use_phi->getParent() == inst->getParent())
        return false;

  return true;
}

Value *GVN::CollapsePhi(PHINode *PN) {
  Value *ConstVal = PN->hasConstantValue(DT);
  if (!ConstVal) return 0;

  Instruction *Inst = dyn_cast<Instruction>(ConstVal);
  if (!Inst)
    return ConstVal;

  if (DT->dominates(Inst, PN))
    if (isSafeReplacement(PN, Inst))
      return Inst;
  return 0;
}

/// IsValueFullyAvailableInBlock - Return true if we can prove that the value
/// we're analyzing is fully available in the specified block.  As we go, keep
/// track of which blocks we know are fully alive in FullyAvailableBlocks.  This
/// map is actually a tri-state map with the following values:
///   0) we know the block *is not* fully available.
///   1) we know the block *is* fully available.
///   2) we do not know whether the block is fully available or not, but we are
///      currently speculating that it will be.
///   3) we are speculating for this block and have used that to speculate for
///      other blocks.
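///
/// For example (illustrative): if block C has predecessors A and B, querying C
/// first inserts C with state 2 (speculatively available) and then recurses
/// into A and B; if either of those turns out to be unavailable, the
/// SpeculationFailure path walks C's successors and downgrades any block whose
/// "available" state rested on that speculation.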
static bool IsValueFullyAvailableInBlock(BasicBlock *BB,
                            DenseMap<BasicBlock*, char> &FullyAvailableBlocks) {
  // Optimistically assume that the block is fully available and check to see
  // if we already know about this block in one lookup.
  std::pair<DenseMap<BasicBlock*, char>::iterator, char> IV =
    FullyAvailableBlocks.insert(std::make_pair(BB, 2));

  // If the entry already existed for this block, return the precomputed value.
  if (!IV.second) {
    // If this is a speculative "available" value, mark it as being used for
    // speculation of other blocks.
    if (IV.first->second == 2)
      IV.first->second = 3;
    return IV.first->second != 0;
  }

  // Otherwise, see if it is fully available in all predecessors.
  pred_iterator PI = pred_begin(BB), PE = pred_end(BB);

  // If this block has no predecessors, it isn't live-in here.
  if (PI == PE)
    goto SpeculationFailure;

  for (; PI != PE; ++PI)
    // If the value isn't fully available in one of our predecessors, then it
    // isn't fully available in this block either.  Undo our previous
    // optimistic assumption and bail out.
    if (!IsValueFullyAvailableInBlock(*PI, FullyAvailableBlocks))
      goto SpeculationFailure;

  return true;

// SpeculationFailure - If we get here, we found out that this is not, after
// all, a fully-available block.  We have a problem if we speculated on this and
// used the speculation to mark other blocks as available.
SpeculationFailure:
  char &BBVal = FullyAvailableBlocks[BB];

  // If we didn't speculate on this, just return with it set to false.
  if (BBVal == 2) {
    BBVal = 0;
    return false;
  }

  // If we did speculate on this value, we could have blocks set to 1 that are
  // incorrect.  Walk the (transitive) successors of this block and mark them as
  // 0 if set to one.
  SmallVector<BasicBlock*, 32> BBWorklist;
  BBWorklist.push_back(BB);

  do {
    BasicBlock *Entry = BBWorklist.pop_back_val();
    // Note that this sets blocks to 0 (unavailable) if they happen to not
    // already be in FullyAvailableBlocks.  This is safe.
    char &EntryVal = FullyAvailableBlocks[Entry];
    if (EntryVal == 0) continue;  // Already unavailable.

    // Mark as unavailable.
    EntryVal = 0;

    for (succ_iterator I = succ_begin(Entry), E = succ_end(Entry); I != E; ++I)
      BBWorklist.push_back(*I);
  } while (!BBWorklist.empty());

  return false;
}


/// CanCoerceMustAliasedValueToLoad - Return true if
/// CoerceAvailableValueToLoadType will succeed.
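/// It rejects first-class aggregates and any case where the store is narrower
/// than the load; e.g. (illustrative) an i64 store can feed an i32 load, but
/// an i8 store cannot feed an i32 load.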
836static bool CanCoerceMustAliasedValueToLoad(Value *StoredVal,
837 const Type *LoadTy,
838 const TargetData &TD) {
839 // If the loaded or stored value is an first class array or struct, don't try
840 // to transform them. We need to be able to bitcast to integer.
  if (LoadTy->isStructTy() || LoadTy->isArrayTy() ||
      StoredVal->getType()->isStructTy() ||
      StoredVal->getType()->isArrayTy())
    return false;

  // The store has to be at least as big as the load.
  if (TD.getTypeSizeInBits(StoredVal->getType()) <
        TD.getTypeSizeInBits(LoadTy))
    return false;

  return true;
}


/// CoerceAvailableValueToLoadType - If we saw a store of a value to memory, and
/// then a load from a must-aliased pointer of a different type, try to coerce
/// the stored value.  LoadedTy is the type of the load we want to replace and
/// InsertPt is the place to insert new instructions.
///
/// If we can't do it, return null.
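///
/// For example (illustrative): an i64 store reloaded as an i32 at the same
/// address on a little-endian target becomes a trunc of the stored value;
/// pointer/integer mismatches of equal size are bridged with
/// ptrtoint/inttoptr/bitcast.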
static Value *CoerceAvailableValueToLoadType(Value *StoredVal,
                                             const Type *LoadedTy,
                                             Instruction *InsertPt,
                                             const TargetData &TD) {
  if (!CanCoerceMustAliasedValueToLoad(StoredVal, LoadedTy, TD))
    return 0;

  const Type *StoredValTy = StoredVal->getType();

  uint64_t StoreSize = TD.getTypeStoreSizeInBits(StoredValTy);
  uint64_t LoadSize = TD.getTypeSizeInBits(LoadedTy);

  // If the store and reload are the same size, we can always reuse it.
  if (StoreSize == LoadSize) {
    if (StoredValTy->isPointerTy() && LoadedTy->isPointerTy()) {
      // Pointer to Pointer -> use bitcast.
      return new BitCastInst(StoredVal, LoadedTy, "", InsertPt);
    }

    // Convert source pointers to integers, which can be bitcast.
    if (StoredValTy->isPointerTy()) {
      StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
      StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
    }

    const Type *TypeToCastTo = LoadedTy;
    if (TypeToCastTo->isPointerTy())
      TypeToCastTo = TD.getIntPtrType(StoredValTy->getContext());

    if (StoredValTy != TypeToCastTo)
      StoredVal = new BitCastInst(StoredVal, TypeToCastTo, "", InsertPt);

    // Cast to pointer if the load needs a pointer type.
    if (LoadedTy->isPointerTy())
      StoredVal = new IntToPtrInst(StoredVal, LoadedTy, "", InsertPt);

    return StoredVal;
  }

  // If the loaded value is smaller than the available value, then we can
  // extract out a piece from it.  If the available value is too small, then we
  // can't do anything.
  assert(StoreSize >= LoadSize && "CanCoerceMustAliasedValueToLoad fail");

  // Convert source pointers to integers, which can be manipulated.
  if (StoredValTy->isPointerTy()) {
    StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
    StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
  }

  // Convert vectors and fp to integer, which can be manipulated.
  if (!StoredValTy->isIntegerTy()) {
    StoredValTy = IntegerType::get(StoredValTy->getContext(), StoreSize);
    StoredVal = new BitCastInst(StoredVal, StoredValTy, "", InsertPt);
  }

  // If this is a big-endian system, we need to shift the value down to the low
  // bits so that a truncate will work.
  if (TD.isBigEndian()) {
    Constant *Val = ConstantInt::get(StoredVal->getType(), StoreSize-LoadSize);
    StoredVal = BinaryOperator::CreateLShr(StoredVal, Val, "tmp", InsertPt);
  }

  // Truncate the integer to the right size now.
  const Type *NewIntTy = IntegerType::get(StoredValTy->getContext(), LoadSize);
  StoredVal = new TruncInst(StoredVal, NewIntTy, "trunc", InsertPt);

  if (LoadedTy == NewIntTy)
    return StoredVal;

  // If the result is a pointer, inttoptr.
  if (LoadedTy->isPointerTy())
    return new IntToPtrInst(StoredVal, LoadedTy, "inttoptr", InsertPt);

  // Otherwise, bitcast.
  return new BitCastInst(StoredVal, LoadedTy, "bitcast", InsertPt);
}

/// GetBaseWithConstantOffset - Analyze the specified pointer to see if it can
/// be expressed as a base pointer plus a constant offset.  Return the base and
/// offset to the caller.
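///
/// For example (illustrative IR): for
///   %p = getelementptr {i32, i32}* %base, i32 0, i32 1
/// with 4-byte i32s, this returns %base and adds 4 to Offset.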
static Value *GetBaseWithConstantOffset(Value *Ptr, int64_t &Offset,
                                        const TargetData &TD) {
  Operator *PtrOp = dyn_cast<Operator>(Ptr);
  if (PtrOp == 0) return Ptr;

  // Just look through bitcasts.
  if (PtrOp->getOpcode() == Instruction::BitCast)
    return GetBaseWithConstantOffset(PtrOp->getOperand(0), Offset, TD);

  // If this is a GEP with constant indices, we can look through it.
  GEPOperator *GEP = dyn_cast<GEPOperator>(PtrOp);
  if (GEP == 0 || !GEP->hasAllConstantIndices()) return Ptr;

  gep_type_iterator GTI = gep_type_begin(GEP);
  for (User::op_iterator I = GEP->idx_begin(), E = GEP->idx_end(); I != E;
       ++I, ++GTI) {
    ConstantInt *OpC = cast<ConstantInt>(*I);
    if (OpC->isZero()) continue;

    // Handle struct and array indices, which add their offset to the pointer.
    if (const StructType *STy = dyn_cast<StructType>(*GTI)) {
      Offset += TD.getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
    } else {
      uint64_t Size = TD.getTypeAllocSize(GTI.getIndexedType());
      Offset += OpC->getSExtValue()*Size;
    }
  }

  // Re-sign extend from the pointer size if needed to get overflow edge cases
  // right.
  unsigned PtrSize = TD.getPointerSizeInBits();
  if (PtrSize < 64)
    Offset = (Offset << (64-PtrSize)) >> (64-PtrSize);

  return GetBaseWithConstantOffset(GEP->getPointerOperand(), Offset, TD);
}


/// AnalyzeLoadFromClobberingWrite - This function is called when we have a
/// memdep query of a load that ends up being a clobbering memory write (store,
/// memset, memcpy, memmove).  This means that the write *may* provide bits used
/// by the load but we can't be sure because the pointers don't mustalias.
///
/// Check this case to see if there is anything more we can do before we give
/// up.  This returns -1 if we have to give up, or a byte number in the stored
/// value of the piece that feeds the load.
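///
/// For example (illustrative): a 4-byte store to %P that clobbers a 1-byte
/// load from %P+2 yields 2, i.e. the load reads byte 2 of the stored value.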
static int AnalyzeLoadFromClobberingWrite(const Type *LoadTy, Value *LoadPtr,
                                          Value *WritePtr,
                                          uint64_t WriteSizeInBits,
                                          const TargetData &TD) {
  // If the loaded or stored value is a first-class array or struct, don't try
  // to transform them.  We need to be able to bitcast to integer.
  if (LoadTy->isStructTy() || LoadTy->isArrayTy())
    return -1;

  int64_t StoreOffset = 0, LoadOffset = 0;
  Value *StoreBase = GetBaseWithConstantOffset(WritePtr, StoreOffset, TD);
  Value *LoadBase =
    GetBaseWithConstantOffset(LoadPtr, LoadOffset, TD);
  if (StoreBase != LoadBase)
    return -1;

  // If the load and store are to the exact same address, they should have been
  // a must alias.  AA must have gotten confused.
  // FIXME: Study to see if/when this happens.  One case is forwarding a memset
  // to a load from the base of the memset.
#if 0
  if (LoadOffset == StoreOffset) {
    dbgs() << "STORE/LOAD DEP WITH COMMON POINTER MISSED:\n"
           << "Base       = " << *StoreBase << "\n"
           << "Store Ptr  = " << *WritePtr << "\n"
           << "Store Offs = " << StoreOffset << "\n"
           << "Load Ptr   = " << *LoadPtr << "\n";
    abort();
  }
#endif

  // If the load and store don't overlap at all, the store doesn't provide
  // anything to the load.  In this case, they really don't alias at all, AA
  // must have gotten confused.
  // FIXME: Investigate cases where this bails out, e.g. rdar://7238614. Then
  // remove this check, as it is duplicated with what we have below.
  uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy);

  if ((WriteSizeInBits & 7) | (LoadSize & 7))
    return -1;
  uint64_t StoreSize = WriteSizeInBits >> 3;  // Convert to bytes.
  LoadSize >>= 3;


  bool isAAFailure = false;
  if (StoreOffset < LoadOffset)
    isAAFailure = StoreOffset+int64_t(StoreSize) <= LoadOffset;
  else
    isAAFailure = LoadOffset+int64_t(LoadSize) <= StoreOffset;

  if (isAAFailure) {
#if 0
    dbgs() << "STORE LOAD DEP WITH COMMON BASE:\n"
           << "Base       = " << *StoreBase << "\n"
           << "Store Ptr  = " << *WritePtr << "\n"
           << "Store Offs = " << StoreOffset << "\n"
           << "Load Ptr   = " << *LoadPtr << "\n";
    abort();
#endif
    return -1;
  }

  // If the Load isn't completely contained within the stored bits, we don't
  // have all the bits to feed it.  We could do something crazy in the future
  // (issue a smaller load then merge the bits in) but this seems unlikely to be
  // valuable.
  if (StoreOffset > LoadOffset ||
      StoreOffset+StoreSize < LoadOffset+LoadSize)
    return -1;

  // Okay, we can do this transformation.  Return the number of bytes into the
  // store that the load is.
  return LoadOffset-StoreOffset;
}

/// AnalyzeLoadFromClobberingStore - This function is called when we have a
/// memdep query of a load that ends up being a clobbering store.
static int AnalyzeLoadFromClobberingStore(const Type *LoadTy, Value *LoadPtr,
                                          StoreInst *DepSI,
                                          const TargetData &TD) {
  // Cannot handle reading from store of first-class aggregate yet.
  if (DepSI->getOperand(0)->getType()->isStructTy() ||
      DepSI->getOperand(0)->getType()->isArrayTy())
    return -1;

  Value *StorePtr = DepSI->getPointerOperand();
  uint64_t StoreSize = TD.getTypeSizeInBits(DepSI->getOperand(0)->getType());
  return AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr,
                                        StorePtr, StoreSize, TD);
}

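/// AnalyzeLoadFromClobberingMemInst - This function is called when we have a
/// memdep query of a load that ends up being clobbered by a mem intrinsic.
/// For a memset, it checks that the load reads inside the written region; for
/// a memcpy/memmove it only handles copies out of a constant global, where the
/// loaded bytes can be constant folded.  Returns the byte offset into the
/// written data, or -1 on failure.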
static int AnalyzeLoadFromClobberingMemInst(const Type *LoadTy, Value *LoadPtr,
                                            MemIntrinsic *MI,
                                            const TargetData &TD) {
  // If the mem operation is a non-constant size, we can't handle it.
  ConstantInt *SizeCst = dyn_cast<ConstantInt>(MI->getLength());
  if (SizeCst == 0) return -1;
  uint64_t MemSizeInBits = SizeCst->getZExtValue()*8;

  // If this is memset, we just need to see if the offset is valid in the size
  // of the memset.
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001089 if (MI->getIntrinsicID() == Intrinsic::memset)
Chris Lattner4ca70fe2009-12-09 07:37:07 +00001090 return AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr, MI->getDest(),
1091 MemSizeInBits, TD);
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001092
Chris Lattnerbc9a28d2009-12-06 05:29:56 +00001093 // If we have a memcpy/memmove, the only case we can handle is if this is a
1094 // copy from constant memory. In that case, we can read directly from the
1095 // constant memory.
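  // Illustrative sketch (added annotation, not in the original source): given
  //   @G = constant [8 x i8] c"abcdefgh"
  //   memcpy(%p, @G, 8 bytes)
  //   %v = load i8* %p
  // the loaded byte comes from the constant @G, so the code below can
  // constant fold the load at the computed offset within the transfer.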
1096 MemTransferInst *MTI = cast<MemTransferInst>(MI);
1097
1098 Constant *Src = dyn_cast<Constant>(MTI->getSource());
1099 if (Src == 0) return -1;
1100
1101 GlobalVariable *GV = dyn_cast<GlobalVariable>(Src->getUnderlyingObject());
1102 if (GV == 0 || !GV->isConstant()) return -1;
1103
1104 // See if the access is within the bounds of the transfer.
Chris Lattner4ca70fe2009-12-09 07:37:07 +00001105 int Offset = AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr,
1106 MI->getDest(), MemSizeInBits, TD);
Chris Lattnerbc9a28d2009-12-06 05:29:56 +00001107 if (Offset == -1)
1108 return Offset;
1109
1110 // Otherwise, see if we can constant fold a load from the constant with the
1111 // offset applied as appropriate.
1112 Src = ConstantExpr::getBitCast(Src,
1113 llvm::Type::getInt8PtrTy(Src->getContext()));
1114 Constant *OffsetCst =
1115 ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
1116 Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
Chris Lattner4ca70fe2009-12-09 07:37:07 +00001117 Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(LoadTy));
Chris Lattnerbc9a28d2009-12-06 05:29:56 +00001118 if (ConstantFoldLoadFromConstPtr(Src, &TD))
1119 return Offset;
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001120 return -1;
1121}
1122
Chris Lattnerca749402009-09-21 06:24:16 +00001123
1124/// GetStoreValueForLoad - This function is called after a memdep query of a
1125/// load found a clobbering store that provides all of the bits the load
1126/// needs (see AnalyzeLoadFromClobberingStore). It extracts the bits of
1127/// SrcVal starting at the given byte Offset and coerces them to LoadTy,
1128/// inserting any needed conversion instructions before InsertPt.
Chris Lattner4fbd14e2009-09-21 06:48:08 +00001129static Value *GetStoreValueForLoad(Value *SrcVal, unsigned Offset,
1130 const Type *LoadTy,
1131 Instruction *InsertPt, const TargetData &TD){
Chris Lattnerca749402009-09-21 06:24:16 +00001132 LLVMContext &Ctx = SrcVal->getType()->getContext();
1133
Chris Lattner7944c212010-05-08 20:01:44 +00001134 uint64_t StoreSize = (TD.getTypeSizeInBits(SrcVal->getType()) + 7) / 8;
1135 uint64_t LoadSize = (TD.getTypeSizeInBits(LoadTy) + 7) / 8;
Chris Lattnerca749402009-09-21 06:24:16 +00001136
Chris Lattnerb2c6ae82009-12-09 18:13:28 +00001137 IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
Chris Lattnerca749402009-09-21 06:24:16 +00001138
1139 // Compute which bits of the stored value are being used by the load. Convert
1140 // to an integer type to start with.
Duncan Sands1df98592010-02-16 11:11:14 +00001141 if (SrcVal->getType()->isPointerTy())
Chris Lattnerb2c6ae82009-12-09 18:13:28 +00001142 SrcVal = Builder.CreatePtrToInt(SrcVal, TD.getIntPtrType(Ctx), "tmp");
Duncan Sands1df98592010-02-16 11:11:14 +00001143 if (!SrcVal->getType()->isIntegerTy())
Chris Lattnerb2c6ae82009-12-09 18:13:28 +00001144 SrcVal = Builder.CreateBitCast(SrcVal, IntegerType::get(Ctx, StoreSize*8),
1145 "tmp");
Chris Lattnerca749402009-09-21 06:24:16 +00001146
1147 // Shift the needed bits down to the least significant position, accounting for endianness.
1148 unsigned ShiftAmt;
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001149 if (TD.isLittleEndian())
Chris Lattnerca749402009-09-21 06:24:16 +00001150 ShiftAmt = Offset*8;
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001151 else
Chris Lattner19ad7842009-09-21 17:55:47 +00001152 ShiftAmt = (StoreSize-LoadSize-Offset)*8;
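  // Worked example (added annotation, not in the original source): with
  // StoreSize = 4, LoadSize = 1, Offset = 2, the wanted byte occupies bits
  // [16,24) of the stored integer on a little-endian target (ShiftAmt = 16)
  // but bits [8,16) on a big-endian target (ShiftAmt = (4-1-2)*8 = 8), since
  // big-endian layout puts the lowest-addressed byte in the most significant
  // position.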
Chris Lattnerca749402009-09-21 06:24:16 +00001153
Chris Lattner4fbd14e2009-09-21 06:48:08 +00001154 if (ShiftAmt)
Chris Lattnerb2c6ae82009-12-09 18:13:28 +00001155 SrcVal = Builder.CreateLShr(SrcVal, ShiftAmt, "tmp");
Chris Lattnerca749402009-09-21 06:24:16 +00001156
Chris Lattner4fbd14e2009-09-21 06:48:08 +00001157 if (LoadSize != StoreSize)
Chris Lattnerb2c6ae82009-12-09 18:13:28 +00001158 SrcVal = Builder.CreateTrunc(SrcVal, IntegerType::get(Ctx, LoadSize*8),
1159 "tmp");
Chris Lattnerca749402009-09-21 06:24:16 +00001160
Chris Lattner4fbd14e2009-09-21 06:48:08 +00001161 return CoerceAvailableValueToLoadType(SrcVal, LoadTy, InsertPt, TD);
Chris Lattnerca749402009-09-21 06:24:16 +00001162}
1163
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001164/// GetMemInstValueForLoad - This function is called when we have a
1165/// memdep query of a load that ends up being a clobbering mem intrinsic.
1166static Value *GetMemInstValueForLoad(MemIntrinsic *SrcInst, unsigned Offset,
1167 const Type *LoadTy, Instruction *InsertPt,
1168 const TargetData &TD){
1169 LLVMContext &Ctx = LoadTy->getContext();
1170 uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy)/8;
1171
1172 IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
1173
1174 // We know that this method is only called when the mem transfer fully
1175 // provides the bits for the load.
1176 if (MemSetInst *MSI = dyn_cast<MemSetInst>(SrcInst)) {
1177 // memset(P, 'x', 1234) -> splat('x'), even if x is a variable, and
1178 // independently of what the offset is.
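    // Illustrative example (added annotation, not in the original source):
    // for a memset of the byte 0xAB feeding a 4-byte load, the loop below
    // builds the value 0xABABABAB by repeatedly shifting and or'ing.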
1179 Value *Val = MSI->getValue();
1180 if (LoadSize != 1)
1181 Val = Builder.CreateZExt(Val, IntegerType::get(Ctx, LoadSize*8));
1182
1183 Value *OneElt = Val;
1184
1185 // Splat the value out to the right number of bits.
1186 for (unsigned NumBytesSet = 1; NumBytesSet != LoadSize; ) {
1187 // If we can double the number of bytes set, do it.
1188 if (NumBytesSet*2 <= LoadSize) {
1189 Value *ShVal = Builder.CreateShl(Val, NumBytesSet*8);
1190 Val = Builder.CreateOr(Val, ShVal);
1191 NumBytesSet <<= 1;
1192 continue;
1193 }
1194
1195 // Otherwise insert one byte at a time.
1196 Value *ShVal = Builder.CreateShl(Val, 1*8);
1197 Val = Builder.CreateOr(OneElt, ShVal);
1198 ++NumBytesSet;
1199 }
1200
1201 return CoerceAvailableValueToLoadType(Val, LoadTy, InsertPt, TD);
1202 }
Chris Lattnerbc9a28d2009-12-06 05:29:56 +00001203
1204 // Otherwise, this is a memcpy/memmove from a constant global.
1205 MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);
1206 Constant *Src = cast<Constant>(MTI->getSource());
1207
1208 // Now see if we can constant fold a load from the constant with the
1209 // offset applied as appropriate.
1210 Src = ConstantExpr::getBitCast(Src,
1211 llvm::Type::getInt8PtrTy(Src->getContext()));
1212 Constant *OffsetCst =
1213 ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
1214 Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
1215 Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(LoadTy));
1216 return ConstantFoldLoadFromConstPtr(Src, &TD);
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001217}
1218
Dan Gohmanb3579832010-04-15 17:08:50 +00001219namespace {
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001220
Chris Lattner87913512009-09-21 06:30:24 +00001221struct AvailableValueInBlock {
1222 /// BB - The basic block in question.
1223 BasicBlock *BB;
Chris Lattnercb9cbc42009-12-06 04:54:31 +00001224 enum ValType {
1225 SimpleVal, // A simple offsetted value that is accessed.
1226 MemIntrin // A memory intrinsic which is loaded from.
1227 };
1228
Chris Lattner87913512009-09-21 06:30:24 +00001229 /// V - The value that is live out of the block.
Chris Lattnercb9cbc42009-12-06 04:54:31 +00001230 PointerIntPair<Value *, 1, ValType> Val;
1231
1232 /// Offset - The byte offset in Val that is interesting for the load query.
Chris Lattner4fbd14e2009-09-21 06:48:08 +00001233 unsigned Offset;
Chris Lattner87913512009-09-21 06:30:24 +00001234
Chris Lattner4fbd14e2009-09-21 06:48:08 +00001235 static AvailableValueInBlock get(BasicBlock *BB, Value *V,
1236 unsigned Offset = 0) {
Chris Lattner87913512009-09-21 06:30:24 +00001237 AvailableValueInBlock Res;
1238 Res.BB = BB;
Chris Lattnercb9cbc42009-12-06 04:54:31 +00001239 Res.Val.setPointer(V);
1240 Res.Val.setInt(SimpleVal);
Chris Lattner4fbd14e2009-09-21 06:48:08 +00001241 Res.Offset = Offset;
Chris Lattner87913512009-09-21 06:30:24 +00001242 return Res;
1243 }
Chris Lattnercb9cbc42009-12-06 04:54:31 +00001244
1245 static AvailableValueInBlock getMI(BasicBlock *BB, MemIntrinsic *MI,
1246 unsigned Offset = 0) {
1247 AvailableValueInBlock Res;
1248 Res.BB = BB;
1249 Res.Val.setPointer(MI);
1250 Res.Val.setInt(MemIntrin);
1251 Res.Offset = Offset;
1252 return Res;
1253 }
1254
1255 bool isSimpleValue() const { return Val.getInt() == SimpleVal; }
1256 Value *getSimpleValue() const {
1257 assert(isSimpleValue() && "Wrong accessor");
1258 return Val.getPointer();
1259 }
1260
1261 MemIntrinsic *getMemIntrinValue() const {
1262 assert(!isSimpleValue() && "Wrong accessor");
1263 return cast<MemIntrinsic>(Val.getPointer());
1264 }
Chris Lattner5362c542009-12-21 23:04:33 +00001265
1266 /// MaterializeAdjustedValue - Emit code into this block to adjust the value
1267 /// defined here to the specified type. This handles various coercion cases.
1268 Value *MaterializeAdjustedValue(const Type *LoadTy,
1269 const TargetData *TD) const {
1270 Value *Res;
1271 if (isSimpleValue()) {
1272 Res = getSimpleValue();
1273 if (Res->getType() != LoadTy) {
1274 assert(TD && "Need target data to handle type mismatch case");
1275 Res = GetStoreValueForLoad(Res, Offset, LoadTy, BB->getTerminator(),
1276 *TD);
1277
1278 DEBUG(errs() << "GVN COERCED NONLOCAL VAL:\nOffset: " << Offset << " "
1279 << *getSimpleValue() << '\n'
1280 << *Res << '\n' << "\n\n\n");
1281 }
1282 } else {
1283 Res = GetMemInstValueForLoad(getMemIntrinValue(), Offset,
1284 LoadTy, BB->getTerminator(), *TD);
1285 DEBUG(errs() << "GVN COERCED NONLOCAL MEM INTRIN:\nOffset: " << Offset
1286 << " " << *getMemIntrinValue() << '\n'
1287 << *Res << '\n' << "\n\n\n");
1288 }
1289 return Res;
1290 }
Chris Lattner87913512009-09-21 06:30:24 +00001291};
1292
Dan Gohmanb3579832010-04-15 17:08:50 +00001293}
1294
Chris Lattnera09fbf02009-10-10 23:50:30 +00001295/// ConstructSSAForLoadSet - Given a set of loads specified by ValuesPerBlock,
1296/// construct SSA form, allowing us to eliminate LI. This returns the value
1297/// that should be used at LI's definition site.
1298static Value *ConstructSSAForLoadSet(LoadInst *LI,
1299 SmallVectorImpl<AvailableValueInBlock> &ValuesPerBlock,
1300 const TargetData *TD,
Chris Lattnerd2191e52009-12-21 23:15:48 +00001301 const DominatorTree &DT,
Chris Lattnera09fbf02009-10-10 23:50:30 +00001302 AliasAnalysis *AA) {
Chris Lattnerd2191e52009-12-21 23:15:48 +00001303 // Check for the fully redundant, dominating load case. In this case, we can
1304 // just use the dominating value directly.
1305 if (ValuesPerBlock.size() == 1 &&
1306 DT.properlyDominates(ValuesPerBlock[0].BB, LI->getParent()))
1307 return ValuesPerBlock[0].MaterializeAdjustedValue(LI->getType(), TD);
1308
1309 // Otherwise, we have to construct SSA form.
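  // Illustrative example (added annotation, not in the original source): if
  // the load's block has predecessors BB1 and BB2 and the value is available
  // as %v1 out of BB1 and %v2 out of BB2, SSAUpdater produces something like
  //   %phi = phi <ty> [ %v1, %BB1 ], [ %v2, %BB2 ]
  // at the start of the load's block, and that phi is returned below.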
Chris Lattnera09fbf02009-10-10 23:50:30 +00001310 SmallVector<PHINode*, 8> NewPHIs;
1311 SSAUpdater SSAUpdate(&NewPHIs);
Duncan Sandsfc6e29d2010-09-02 08:14:03 +00001312 SSAUpdate.Initialize(LI->getType(), LI->getName());
Chris Lattnera09fbf02009-10-10 23:50:30 +00001313
1314 const Type *LoadTy = LI->getType();
1315
Chris Lattner771a5422009-09-20 20:09:34 +00001316 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
Chris Lattnercb9cbc42009-12-06 04:54:31 +00001317 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1318 BasicBlock *BB = AV.BB;
Chris Lattner771a5422009-09-20 20:09:34 +00001319
Chris Lattnera09fbf02009-10-10 23:50:30 +00001320 if (SSAUpdate.HasValueForBlock(BB))
1321 continue;
Chris Lattnercb9cbc42009-12-06 04:54:31 +00001322
Chris Lattner5362c542009-12-21 23:04:33 +00001323 SSAUpdate.AddAvailableValue(BB, AV.MaterializeAdjustedValue(LoadTy, TD));
Chris Lattner771a5422009-09-20 20:09:34 +00001324 }
Chris Lattnera09fbf02009-10-10 23:50:30 +00001325
1326 // Perform PHI construction.
1327 Value *V = SSAUpdate.GetValueInMiddleOfBlock(LI->getParent());
1328
1329 // If new PHI nodes were created, notify alias analysis.
Duncan Sands1df98592010-02-16 11:11:14 +00001330 if (V->getType()->isPointerTy())
Chris Lattnera09fbf02009-10-10 23:50:30 +00001331 for (unsigned i = 0, e = NewPHIs.size(); i != e; ++i)
1332 AA->copyValue(LI, NewPHIs[i]);
1333
1334 return V;
Chris Lattner771a5422009-09-20 20:09:34 +00001335}
1336
Gabor Greifea3eec92010-04-09 10:57:00 +00001337static bool isLifetimeStart(const Instruction *Inst) {
1338 if (const IntrinsicInst* II = dyn_cast<IntrinsicInst>(Inst))
Owen Anderson9ff5a232009-12-02 07:35:19 +00001339 return II->getIntrinsicID() == Intrinsic::lifetime_start;
Chris Lattner720e7902009-12-02 06:44:58 +00001340 return false;
1341}
1342
Owen Anderson62bc33c2007-08-16 22:02:55 +00001343/// processNonLocalLoad - Attempt to eliminate a load whose dependencies are
1344/// non-local by performing PHI construction.
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001345bool GVN::processNonLocalLoad(LoadInst *LI,
Chris Lattner8e1e95c2008-03-21 22:01:16 +00001346 SmallVectorImpl<Instruction*> &toErase) {
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001347 // Find the non-local dependencies of the load.
Chris Lattner0ee443d2009-12-22 04:25:02 +00001348 SmallVector<NonLocalDepResult, 64> Deps;
Chris Lattner91bcf642008-12-09 19:25:07 +00001349 MD->getNonLocalPointerDependency(LI->getOperand(0), true, LI->getParent(),
1350 Deps);
David Greenebf7f78e2010-01-05 01:27:17 +00001351 //DEBUG(dbgs() << "INVESTIGATING NONLOCAL LOAD: "
Dan Gohman2a298992009-07-31 20:24:18 +00001352 // << Deps.size() << *LI << '\n');
Daniel Dunbara279bc32009-09-20 02:20:51 +00001353
Owen Anderson516eb1c2008-08-26 22:07:42 +00001354 // If we had to process more than one hundred blocks to find the
1355 // dependencies, this load isn't worth worrying about. Optimizing
1356 // it will be too expensive.
Chris Lattner91bcf642008-12-09 19:25:07 +00001357 if (Deps.size() > 100)
Owen Anderson516eb1c2008-08-26 22:07:42 +00001358 return false;
Chris Lattner5f4f84b2008-12-18 00:51:32 +00001359
1360 // If we had a phi translation failure, we'll have a single entry which is a
1361 // clobber in the current block. Reject this early.
Chris Lattnere18b9712009-12-09 07:08:01 +00001362 if (Deps.size() == 1 && Deps[0].getResult().isClobber()) {
Torok Edwin4306b1a2009-06-17 18:48:18 +00001363 DEBUG(
David Greenebf7f78e2010-01-05 01:27:17 +00001364 dbgs() << "GVN: non-local load ";
1365 WriteAsOperand(dbgs(), LI);
1366 dbgs() << " is clobbered by " << *Deps[0].getResult().getInst() << '\n';
Torok Edwin4306b1a2009-06-17 18:48:18 +00001367 );
Chris Lattner5f4f84b2008-12-18 00:51:32 +00001368 return false;
Torok Edwin4306b1a2009-06-17 18:48:18 +00001369 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001370
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001371 // Filter out useless results (non-locals, etc). Keep track of the blocks
1372 // where we have a value available in repl, also keep track of whether we see
1373 // dependencies that produce an unknown value for the load (such as a call
1374 // that could potentially clobber the load).
Chris Lattner87913512009-09-21 06:30:24 +00001375 SmallVector<AvailableValueInBlock, 16> ValuesPerBlock;
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001376 SmallVector<BasicBlock*, 16> UnavailableBlocks;
Daniel Dunbara279bc32009-09-20 02:20:51 +00001377
Chris Lattner771a5422009-09-20 20:09:34 +00001378 const TargetData *TD = 0;
1379
Chris Lattner91bcf642008-12-09 19:25:07 +00001380 for (unsigned i = 0, e = Deps.size(); i != e; ++i) {
Chris Lattnere18b9712009-12-09 07:08:01 +00001381 BasicBlock *DepBB = Deps[i].getBB();
1382 MemDepResult DepInfo = Deps[i].getResult();
Daniel Dunbara279bc32009-09-20 02:20:51 +00001383
Chris Lattnerb51deb92008-12-05 21:04:20 +00001384 if (DepInfo.isClobber()) {
Chris Lattneraf064ae2009-12-09 18:21:46 +00001385 // The address being loaded in this non-local block may not be the same as
1386 // the pointer operand of the load if PHI translation occurs. Make sure
1387 // to consider the right address.
1388 Value *Address = Deps[i].getAddress();
1389
Chris Lattner4fbd14e2009-09-21 06:48:08 +00001390 // If the dependence is to a store that writes to a superset of the bits
1391 // read by the load, we can extract the bits we need for the load from the
1392 // stored value.
1393 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInfo.getInst())) {
1394 if (TD == 0)
1395 TD = getAnalysisIfAvailable<TargetData>();
Chris Lattneraf064ae2009-12-09 18:21:46 +00001396 if (TD && Address) {
1397 int Offset = AnalyzeLoadFromClobberingStore(LI->getType(), Address,
Chris Lattner4ca70fe2009-12-09 07:37:07 +00001398 DepSI, *TD);
Chris Lattner4fbd14e2009-09-21 06:48:08 +00001399 if (Offset != -1) {
1400 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1401 DepSI->getOperand(0),
1402 Offset));
1403 continue;
1404 }
1405 }
1406 }
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001407
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001408 // If the clobbering value is a memset/memcpy/memmove, see if we can
1409 // forward a value on from it.
Chris Lattnercb9cbc42009-12-06 04:54:31 +00001410 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(DepInfo.getInst())) {
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001411 if (TD == 0)
1412 TD = getAnalysisIfAvailable<TargetData>();
Chris Lattneraf064ae2009-12-09 18:21:46 +00001413 if (TD && Address) {
1414 int Offset = AnalyzeLoadFromClobberingMemInst(LI->getType(), Address,
Chris Lattner4ca70fe2009-12-09 07:37:07 +00001415 DepMI, *TD);
Chris Lattnercb9cbc42009-12-06 04:54:31 +00001416 if (Offset != -1) {
1417 ValuesPerBlock.push_back(AvailableValueInBlock::getMI(DepBB, DepMI,
1418 Offset));
1419 continue;
1420 }
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001421 }
1422 }
Chris Lattner4fbd14e2009-09-21 06:48:08 +00001423
Chris Lattnerb51deb92008-12-05 21:04:20 +00001424 UnavailableBlocks.push_back(DepBB);
1425 continue;
1426 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001427
Chris Lattnerb51deb92008-12-05 21:04:20 +00001428 Instruction *DepInst = DepInfo.getInst();
Daniel Dunbara279bc32009-09-20 02:20:51 +00001429
Chris Lattnerb51deb92008-12-05 21:04:20 +00001430 // Loading the allocation -> undef.
Chris Lattner720e7902009-12-02 06:44:58 +00001431 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst) ||
Owen Anderson9ff5a232009-12-02 07:35:19 +00001432 // Loading immediately after lifetime begin -> undef.
1433 isLifetimeStart(DepInst)) {
Chris Lattner87913512009-09-21 06:30:24 +00001434 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1435 UndefValue::get(LI->getType())));
Chris Lattnerbf145d62008-12-01 01:15:42 +00001436 continue;
1437 }
Owen Andersonb62f7922009-10-28 07:05:35 +00001438
Chris Lattner87913512009-09-21 06:30:24 +00001439 if (StoreInst *S = dyn_cast<StoreInst>(DepInst)) {
Daniel Dunbara279bc32009-09-20 02:20:51 +00001440 // Reject a store to the same address whose value has a different type,
Chris Lattner771a5422009-09-20 20:09:34 +00001441 // unless the stored value can be coerced to the load's type (checked below).
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001442 if (S->getOperand(0)->getType() != LI->getType()) {
Chris Lattner771a5422009-09-20 20:09:34 +00001443 if (TD == 0)
1444 TD = getAnalysisIfAvailable<TargetData>();
1445
1446 // If the stored value is larger or equal to the loaded value, we can
1447 // reuse it.
Chris Lattner8b2bc3d2009-09-21 17:24:04 +00001448 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(S->getOperand(0),
1449 LI->getType(), *TD)) {
Chris Lattner771a5422009-09-20 20:09:34 +00001450 UnavailableBlocks.push_back(DepBB);
1451 continue;
1452 }
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001453 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001454
Chris Lattner87913512009-09-21 06:30:24 +00001455 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1456 S->getOperand(0)));
Chris Lattner4fbd14e2009-09-21 06:48:08 +00001457 continue;
1458 }
1459
1460 if (LoadInst *LD = dyn_cast<LoadInst>(DepInst)) {
Chris Lattner771a5422009-09-20 20:09:34 +00001461 // If the types mismatch and we can't handle it, reject reuse of the load.
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001462 if (LD->getType() != LI->getType()) {
Chris Lattner771a5422009-09-20 20:09:34 +00001463 if (TD == 0)
1464 TD = getAnalysisIfAvailable<TargetData>();
1465
1466 // If the stored value is larger or equal to the loaded value, we can
1467 // reuse it.
Chris Lattner8b2bc3d2009-09-21 17:24:04 +00001468 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(LD, LI->getType(),*TD)){
Chris Lattner771a5422009-09-20 20:09:34 +00001469 UnavailableBlocks.push_back(DepBB);
1470 continue;
1471 }
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001472 }
Chris Lattner87913512009-09-21 06:30:24 +00001473 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB, LD));
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001474 continue;
Owen Anderson0cd32032007-07-25 19:57:03 +00001475 }
Chris Lattner4fbd14e2009-09-21 06:48:08 +00001476
1477 UnavailableBlocks.push_back(DepBB);
1478 continue;
Chris Lattner88365bb2008-03-21 21:14:38 +00001479 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001480
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001481 // If we have no predecessors that produce a known value for this load, exit
1482 // early.
1483 if (ValuesPerBlock.empty()) return false;
Daniel Dunbara279bc32009-09-20 02:20:51 +00001484
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001485 // If all of the instructions we depend on produce a known value for this
1486 // load, then it is fully redundant and we can use PHI insertion to compute
1487 // its value. Insert PHIs and remove the fully redundant value now.
1488 if (UnavailableBlocks.empty()) {
David Greenebf7f78e2010-01-05 01:27:17 +00001489 DEBUG(dbgs() << "GVN REMOVING NONLOCAL LOAD: " << *LI << '\n');
Chris Lattner771a5422009-09-20 20:09:34 +00001490
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001491 // Perform PHI construction.
Chris Lattnerd2191e52009-12-21 23:15:48 +00001492 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD, *DT,
Chris Lattnera09fbf02009-10-10 23:50:30 +00001493 VN.getAliasAnalysis());
Chris Lattner771a5422009-09-20 20:09:34 +00001494 LI->replaceAllUsesWith(V);
Daniel Dunbara279bc32009-09-20 02:20:51 +00001495
Chris Lattner771a5422009-09-20 20:09:34 +00001496 if (isa<PHINode>(V))
1497 V->takeName(LI);
Duncan Sands1df98592010-02-16 11:11:14 +00001498 if (V->getType()->isPointerTy())
Chris Lattner771a5422009-09-20 20:09:34 +00001499 MD->invalidateCachedPointerInfo(V);
Bob Wilson74175c22010-02-22 21:39:41 +00001500 VN.erase(LI);
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001501 toErase.push_back(LI);
Dan Gohmanfe601042010-06-22 15:08:57 +00001502 ++NumGVNLoad;
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001503 return true;
1504 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001505
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001506 if (!EnablePRE || !EnableLoadPRE)
1507 return false;
1508
1509 // Okay, we have *some* definitions of the value. This means that the value
1510 // is available in some of our (transitive) predecessors. Let's think about
1511 // doing PRE of this load. This will involve inserting a new load into the
1512 // predecessor when it's not available. We could do this in general, but
1513 // prefer to not increase code size. As such, we only do this when we know
1514 // that we only have to insert *one* load (which means we're basically moving
1515 // the load, not inserting a new one).
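  // Illustrative example (added annotation, not in the original source): if
  // the load is available in three of four predecessors, the code below
  // inserts one copy of the load into the single unavailable predecessor and
  // joins the values with a phi; if two or more predecessors lack the value,
  // we give up (unless full load PRE is enabled) rather than insert several
  // loads.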
Daniel Dunbara279bc32009-09-20 02:20:51 +00001516
Owen Anderson88554df2009-05-31 09:03:40 +00001517 SmallPtrSet<BasicBlock *, 4> Blockers;
1518 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1519 Blockers.insert(UnavailableBlocks[i]);
1520
1521 // Let's find the first basic block with more than one predecessor. Walk backwards
1522 // through predecessors if needed.
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001523 BasicBlock *LoadBB = LI->getParent();
Owen Anderson88554df2009-05-31 09:03:40 +00001524 BasicBlock *TmpBB = LoadBB;
1525
1526 bool isSinglePred = false;
Dale Johannesen42c3f552009-06-17 20:48:23 +00001527 bool allSingleSucc = true;
Owen Anderson88554df2009-05-31 09:03:40 +00001528 while (TmpBB->getSinglePredecessor()) {
1529 isSinglePred = true;
1530 TmpBB = TmpBB->getSinglePredecessor();
Owen Anderson88554df2009-05-31 09:03:40 +00001531 if (TmpBB == LoadBB) // Infinite (unreachable) loop.
1532 return false;
1533 if (Blockers.count(TmpBB))
1534 return false;
Owen Andersonb0ba0f42010-09-25 05:26:18 +00001535
1536 // If any of these blocks has more than one successor (i.e. if the edge we
1537 // just traversed was critical), then there are other paths through this
1538 // block along which the load may not be anticipated. Hoisting the load
1539 // above this block would be adding the load to execution paths along
1540 // which it was not previously executed.
Dale Johannesen42c3f552009-06-17 20:48:23 +00001541 if (TmpBB->getTerminator()->getNumSuccessors() != 1)
Owen Andersonb0ba0f42010-09-25 05:26:18 +00001542 return false;
Owen Anderson88554df2009-05-31 09:03:40 +00001543 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001544
Owen Anderson88554df2009-05-31 09:03:40 +00001545 assert(TmpBB);
1546 LoadBB = TmpBB;
Daniel Dunbara279bc32009-09-20 02:20:51 +00001547
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001548 // If we have a repl set with LI itself in it, this means we have a loop where
1549 // at least one of the values is LI. Since this means that we won't be able
1550 // to eliminate LI even if we insert uses in the other predecessors, we will
1551 // end up increasing code size. Reject this by scanning for LI.
Bob Wilson3bd19d72010-03-02 00:09:29 +00001552 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
1553 if (ValuesPerBlock[i].isSimpleValue() &&
1554 ValuesPerBlock[i].getSimpleValue() == LI) {
1555 // Skip cases where LI is the only definition, even for EnableFullLoadPRE.
1556 if (!EnableFullLoadPRE || e == 1)
Bob Wilson6cad4172010-02-01 21:17:14 +00001557 return false;
Bob Wilson3bd19d72010-03-02 00:09:29 +00001558 }
Bob Wilson6cad4172010-02-01 21:17:14 +00001559 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001560
Chris Lattnercb9cbc42009-12-06 04:54:31 +00001561 // FIXME: It is extremely unclear what this loop is doing, other than
1562 // artificially restricting loadpre.
Owen Anderson88554df2009-05-31 09:03:40 +00001563 if (isSinglePred) {
1564 bool isHot = false;
Chris Lattnercb9cbc42009-12-06 04:54:31 +00001565 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
1566 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1567 if (AV.isSimpleValue())
Daniel Dunbara279bc32009-09-20 02:20:51 +00001568 // "Hot" Instruction is in some loop (because it dominates its dep.
1569 // instruction).
Chris Lattnercb9cbc42009-12-06 04:54:31 +00001570 if (Instruction *I = dyn_cast<Instruction>(AV.getSimpleValue()))
1571 if (DT->dominates(LI, I)) {
1572 isHot = true;
1573 break;
1574 }
1575 }
Owen Anderson88554df2009-05-31 09:03:40 +00001576
1577 // We are interested only in "hot" instructions. We don't want to do any
1578 // mis-optimizations here.
1579 if (!isHot)
1580 return false;
1581 }
1582
Bob Wilson6cad4172010-02-01 21:17:14 +00001583 // Check to see how many predecessors have the loaded value fully
1584 // available.
1585 DenseMap<BasicBlock*, Value*> PredLoads;
Chris Lattner72bc70d2008-12-05 07:49:08 +00001586 DenseMap<BasicBlock*, char> FullyAvailableBlocks;
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001587 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i)
Chris Lattner87913512009-09-21 06:30:24 +00001588 FullyAvailableBlocks[ValuesPerBlock[i].BB] = true;
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001589 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1590 FullyAvailableBlocks[UnavailableBlocks[i]] = false;
1591
Bob Wilson34414a62010-05-04 20:03:21 +00001592 SmallVector<std::pair<TerminatorInst*, unsigned>, 4> NeedToSplit;
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001593 for (pred_iterator PI = pred_begin(LoadBB), E = pred_end(LoadBB);
1594 PI != E; ++PI) {
Bob Wilson6cad4172010-02-01 21:17:14 +00001595 BasicBlock *Pred = *PI;
1596 if (IsValueFullyAvailableInBlock(Pred, FullyAvailableBlocks)) {
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001597 continue;
Bob Wilson6cad4172010-02-01 21:17:14 +00001598 }
1599 PredLoads[Pred] = 0;
Bob Wilson484d4a32010-02-16 19:51:59 +00001600
Bob Wilson6cad4172010-02-01 21:17:14 +00001601 if (Pred->getTerminator()->getNumSuccessors() != 1) {
Bob Wilson484d4a32010-02-16 19:51:59 +00001602 if (isa<IndirectBrInst>(Pred->getTerminator())) {
1603 DEBUG(dbgs() << "COULD NOT PRE LOAD BECAUSE OF INDBR CRITICAL EDGE '"
1604 << Pred->getName() << "': " << *LI << '\n');
1605 return false;
1606 }
Bob Wilsonae23daf2010-02-16 21:06:42 +00001607 unsigned SuccNum = GetSuccessorNumber(Pred, LoadBB);
Bob Wilson34414a62010-05-04 20:03:21 +00001608 NeedToSplit.push_back(std::make_pair(Pred->getTerminator(), SuccNum));
Bob Wilson6cad4172010-02-01 21:17:14 +00001609 }
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001610 }
Bob Wilson34414a62010-05-04 20:03:21 +00001611 if (!NeedToSplit.empty()) {
Bob Wilsonbc786532010-05-05 20:44:15 +00001612 toSplit.append(NeedToSplit.begin(), NeedToSplit.end());
Bob Wilson70704972010-03-01 23:37:32 +00001613 return false;
Bob Wilson34414a62010-05-04 20:03:21 +00001614 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001615
Bob Wilson6cad4172010-02-01 21:17:14 +00001616 // Decide whether PRE is profitable for this load.
1617 unsigned NumUnavailablePreds = PredLoads.size();
1618 assert(NumUnavailablePreds != 0 &&
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001619 "Fully available value should be eliminated above!");
Bob Wilson6cad4172010-02-01 21:17:14 +00001620 if (!EnableFullLoadPRE) {
1621 // If this load is unavailable in multiple predecessors, reject it.
1622 // FIXME: If we could restructure the CFG, we could make a common pred with
1623 // all the preds that don't have an available LI and insert a new load into
1624 // that one block.
1625 if (NumUnavailablePreds != 1)
1626 return false;
Owen Andersona37226a2007-08-07 23:12:31 +00001627 }
Bob Wilson6cad4172010-02-01 21:17:14 +00001628
1629 // Check if the load can safely be moved to all the unavailable predecessors.
1630 bool CanDoPRE = true;
Chris Lattnerdd696052009-11-28 15:39:14 +00001631 SmallVector<Instruction*, 8> NewInsts;
Bob Wilson6cad4172010-02-01 21:17:14 +00001632 for (DenseMap<BasicBlock*, Value*>::iterator I = PredLoads.begin(),
1633 E = PredLoads.end(); I != E; ++I) {
1634 BasicBlock *UnavailablePred = I->first;
1635
1636 // Do PHI translation to get its value in the predecessor if necessary. The
1637 // returned pointer (if non-null) is guaranteed to dominate UnavailablePred.
1638
1639 // If all preds have a single successor, then we know it is safe to insert
1640 // the load on the pred (?!?), so we can insert code to materialize the
1641 // pointer if it is not available.
1642 PHITransAddr Address(LI->getOperand(0), TD);
1643 Value *LoadPtr = 0;
1644 if (allSingleSucc) {
1645 LoadPtr = Address.PHITranslateWithInsertion(LoadBB, UnavailablePred,
1646 *DT, NewInsts);
1647 } else {
Daniel Dunbar6d8f2ca2010-02-24 08:48:04 +00001648 Address.PHITranslateValue(LoadBB, UnavailablePred, DT);
Bob Wilson6cad4172010-02-01 21:17:14 +00001649 LoadPtr = Address.getAddr();
Bob Wilson6cad4172010-02-01 21:17:14 +00001650 }
1651
1652 // If we couldn't find or insert a computation of this phi translated value,
1653 // we fail PRE.
1654 if (LoadPtr == 0) {
1655 DEBUG(dbgs() << "COULDN'T INSERT PHI TRANSLATED VALUE OF: "
1656 << *LI->getOperand(0) << "\n");
1657 CanDoPRE = false;
1658 break;
1659 }
1660
1661 // Make sure it is valid to move this load here. We have to watch out for:
1662 // @1 = getelementptr (i8* p, ...
1663 // test p and branch if == 0
1664 // load @1
1665 // It is valid to have the getelementptr before the test, even if p can be 0,
1666 // as getelementptr only does address arithmetic.
1667 // If we are not pushing the value through any multiple-successor blocks
1668 // we do not have this case. Otherwise, check that the load is safe to
1669 // put anywhere; this can be improved, but should be conservatively safe.
1670 if (!allSingleSucc &&
1671 // FIXME: REEVALUATE THIS.
1672 !isSafeToLoadUnconditionally(LoadPtr,
1673 UnavailablePred->getTerminator(),
1674 LI->getAlignment(), TD)) {
1675 CanDoPRE = false;
1676 break;
1677 }
1678
1679 I->second = LoadPtr;
Chris Lattner05e15f82009-12-09 01:59:31 +00001680 }
1681
Bob Wilson6cad4172010-02-01 21:17:14 +00001682 if (!CanDoPRE) {
1683 while (!NewInsts.empty())
1684 NewInsts.pop_back_val()->eraseFromParent();
Dale Johannesen42c3f552009-06-17 20:48:23 +00001685 return false;
Chris Lattner0c264b12009-11-28 16:08:18 +00001686 }
Dale Johannesen42c3f552009-06-17 20:48:23 +00001687
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001688 // Okay, we can eliminate this load by inserting a reload in the predecessor
1689 // and using PHI construction to get the value in the other predecessors, do
1690 // it.
David Greenebf7f78e2010-01-05 01:27:17 +00001691 DEBUG(dbgs() << "GVN REMOVING PRE LOAD: " << *LI << '\n');
Chris Lattner0c264b12009-11-28 16:08:18 +00001692 DEBUG(if (!NewInsts.empty())
David Greenebf7f78e2010-01-05 01:27:17 +00001693 dbgs() << "INSERTED " << NewInsts.size() << " INSTS: "
Chris Lattner0c264b12009-11-28 16:08:18 +00001694 << *NewInsts.back() << '\n');
1695
Bob Wilson6cad4172010-02-01 21:17:14 +00001696 // Assign value numbers to the new instructions.
1697 for (unsigned i = 0, e = NewInsts.size(); i != e; ++i) {
1698 // FIXME: We really _ought_ to insert these value numbers into their
1699 // parent's availability map. However, in doing so, we risk getting into
1700 // ordering issues. If a block hasn't been processed yet, we would be
1701 // marking a value as AVAIL-IN, which isn't what we intend.
1702 VN.lookup_or_add(NewInsts[i]);
1703 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001704
Bob Wilson6cad4172010-02-01 21:17:14 +00001705 for (DenseMap<BasicBlock*, Value*>::iterator I = PredLoads.begin(),
1706 E = PredLoads.end(); I != E; ++I) {
1707 BasicBlock *UnavailablePred = I->first;
1708 Value *LoadPtr = I->second;
1709
1710 Value *NewLoad = new LoadInst(LoadPtr, LI->getName()+".pre", false,
1711 LI->getAlignment(),
1712 UnavailablePred->getTerminator());
1713
1714 // Add the newly created load.
1715 ValuesPerBlock.push_back(AvailableValueInBlock::get(UnavailablePred,
1716 NewLoad));
Bob Wilson188f4282010-02-23 05:55:00 +00001717 MD->invalidateCachedPointerInfo(LoadPtr);
1718 DEBUG(dbgs() << "GVN INSERTED " << *NewLoad << '\n');
Bob Wilson6cad4172010-02-01 21:17:14 +00001719 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001720
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001721 // Perform PHI construction.
Chris Lattnerd2191e52009-12-21 23:15:48 +00001722 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD, *DT,
Chris Lattnera09fbf02009-10-10 23:50:30 +00001723 VN.getAliasAnalysis());
Chris Lattner771a5422009-09-20 20:09:34 +00001724 LI->replaceAllUsesWith(V);
1725 if (isa<PHINode>(V))
1726 V->takeName(LI);
Duncan Sands1df98592010-02-16 11:11:14 +00001727 if (V->getType()->isPointerTy())
Chris Lattner771a5422009-09-20 20:09:34 +00001728 MD->invalidateCachedPointerInfo(V);
Bob Wilson74175c22010-02-22 21:39:41 +00001729 VN.erase(LI);
Chris Lattnerc89c6a92008-12-02 08:16:11 +00001730 toErase.push_back(LI);
Dan Gohmanfe601042010-06-22 15:08:57 +00001731 ++NumPRELoad;
Owen Anderson0cd32032007-07-25 19:57:03 +00001732 return true;
1733}
1734
Owen Anderson62bc33c2007-08-16 22:02:55 +00001735/// processLoad - Attempt to eliminate a load, first by eliminating it
1736/// locally, and then attempting non-local elimination if that fails.
Chris Lattnerb51deb92008-12-05 21:04:20 +00001737bool GVN::processLoad(LoadInst *L, SmallVectorImpl<Instruction*> &toErase) {
Dan Gohman4ec01b22009-11-14 02:27:51 +00001738 if (!MD)
1739 return false;
1740
Chris Lattnerb51deb92008-12-05 21:04:20 +00001741 if (L->isVolatile())
Owen Anderson1ad2cb72007-07-24 17:55:58 +00001742 return false;
Daniel Dunbara279bc32009-09-20 02:20:51 +00001743
Owen Anderson1ad2cb72007-07-24 17:55:58 +00001744 // Find what this load depends on (typically an earlier store or load of the same pointer).
Chris Lattnerb2412a82009-09-21 02:42:51 +00001745 MemDepResult Dep = MD->getDependency(L);
Daniel Dunbara279bc32009-09-20 02:20:51 +00001746
Chris Lattnerb51deb92008-12-05 21:04:20 +00001747 // If the value isn't available, don't do anything!
Chris Lattnerb2412a82009-09-21 02:42:51 +00001748 if (Dep.isClobber()) {
Chris Lattnereed919b2009-09-21 05:57:11 +00001749 // Check to see if we have something like this:
Chris Lattnerbb6495c2009-09-20 19:03:47 +00001750 // store i32 123, i32* %P
1751 // %A = bitcast i32* %P to i8*
1752 // %B = gep i8* %A, i32 1
1753 // %C = load i8* %B
1754 //
1755 // We could do that by recognizing if the clobber instructions are obviously
1756 // a common base + constant offset, and if the previous store (or memset)
1757 // completely covers this load. This sort of thing can happen in bitfield
1758 // access code.
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001759 Value *AvailVal = 0;
Chris Lattnereed919b2009-09-21 05:57:11 +00001760 if (StoreInst *DepSI = dyn_cast<StoreInst>(Dep.getInst()))
Chris Lattner1ce08292009-09-21 06:22:46 +00001761 if (const TargetData *TD = getAnalysisIfAvailable<TargetData>()) {
Chris Lattner4ca70fe2009-12-09 07:37:07 +00001762 int Offset = AnalyzeLoadFromClobberingStore(L->getType(),
1763 L->getPointerOperand(),
1764 DepSI, *TD);
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001765 if (Offset != -1)
1766 AvailVal = GetStoreValueForLoad(DepSI->getOperand(0), Offset,
1767 L->getType(), L, *TD);
Chris Lattner1ce08292009-09-21 06:22:46 +00001768 }
Chris Lattnereed919b2009-09-21 05:57:11 +00001769
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001770 // If the clobbering value is a memset/memcpy/memmove, see if we can forward
1771 // a value on from it.
1772 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(Dep.getInst())) {
1773 if (const TargetData *TD = getAnalysisIfAvailable<TargetData>()) {
Chris Lattner4ca70fe2009-12-09 07:37:07 +00001774 int Offset = AnalyzeLoadFromClobberingMemInst(L->getType(),
1775 L->getPointerOperand(),
1776 DepMI, *TD);
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001777 if (Offset != -1)
1778 AvailVal = GetMemInstValueForLoad(DepMI, Offset, L->getType(), L,*TD);
1779 }
1780 }
1781
1782 if (AvailVal) {
David Greenebf7f78e2010-01-05 01:27:17 +00001783 DEBUG(dbgs() << "GVN COERCED INST:\n" << *Dep.getInst() << '\n'
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001784 << *AvailVal << '\n' << *L << "\n\n\n");
1785
1786 // Replace the load!
1787 L->replaceAllUsesWith(AvailVal);
Duncan Sands1df98592010-02-16 11:11:14 +00001788 if (AvailVal->getType()->isPointerTy())
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001789 MD->invalidateCachedPointerInfo(AvailVal);
Bob Wilson74175c22010-02-22 21:39:41 +00001790 VN.erase(L);
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001791 toErase.push_back(L);
Dan Gohmanfe601042010-06-22 15:08:57 +00001792 ++NumGVNLoad;
Chris Lattnerfaf815b2009-12-06 01:57:02 +00001793 return true;
1794 }
1795
Torok Edwin3f3c6d42009-05-29 09:46:03 +00001796 DEBUG(
1797 // fast print dep, using operator<< on instruction would be too slow
David Greenebf7f78e2010-01-05 01:27:17 +00001798 dbgs() << "GVN: load ";
1799 WriteAsOperand(dbgs(), L);
Chris Lattnerb2412a82009-09-21 02:42:51 +00001800 Instruction *I = Dep.getInst();
David Greenebf7f78e2010-01-05 01:27:17 +00001801 dbgs() << " is clobbered by " << *I << '\n';
Torok Edwin3f3c6d42009-05-29 09:46:03 +00001802 );
Chris Lattnerb51deb92008-12-05 21:04:20 +00001803 return false;
Torok Edwin3f3c6d42009-05-29 09:46:03 +00001804 }
Chris Lattnerb51deb92008-12-05 21:04:20 +00001805
1806 // If it is defined in another block, try harder.
Chris Lattnerb2412a82009-09-21 02:42:51 +00001807 if (Dep.isNonLocal())
Chris Lattnerb51deb92008-12-05 21:04:20 +00001808 return processNonLocalLoad(L, toErase);
Eli Friedmanb6c36e42008-02-12 12:08:14 +00001809
Chris Lattnerb2412a82009-09-21 02:42:51 +00001810 Instruction *DepInst = Dep.getInst();
Chris Lattnerb51deb92008-12-05 21:04:20 +00001811 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInst)) {
Chris Lattnerbb6495c2009-09-20 19:03:47 +00001812 Value *StoredVal = DepSI->getOperand(0);
1813
1814 // The store and load are to a must-aliased pointer, but they may not
1815 // actually have the same type. See if we know how to reuse the stored
1816 // value (depending on its type).
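    // Illustrative example (added annotation, not in the original source):
    //   store float %f, float* %p
    //   %v = load i32* %q   ; where %q is %p bitcast to i32*
    // Both access the same four bytes, so %f can be reused for the load once
    // the coercion below bitcasts it from float to i32.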
1817 const TargetData *TD = 0;
Chris Lattnera52fce42009-10-21 04:11:19 +00001818 if (StoredVal->getType() != L->getType()) {
1819 if ((TD = getAnalysisIfAvailable<TargetData>())) {
1820 StoredVal = CoerceAvailableValueToLoadType(StoredVal, L->getType(),
1821 L, *TD);
1822 if (StoredVal == 0)
1823 return false;
1824
David Greenebf7f78e2010-01-05 01:27:17 +00001825 DEBUG(dbgs() << "GVN COERCED STORE:\n" << *DepSI << '\n' << *StoredVal
Chris Lattnera52fce42009-10-21 04:11:19 +00001826 << '\n' << *L << "\n\n\n");
1827 }
1828 else
Chris Lattnerbb6495c2009-09-20 19:03:47 +00001829 return false;
Chris Lattnerbb6495c2009-09-20 19:03:47 +00001830 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001831
Chris Lattnerb51deb92008-12-05 21:04:20 +00001832 // Remove it!
Chris Lattnerbb6495c2009-09-20 19:03:47 +00001833 L->replaceAllUsesWith(StoredVal);
Duncan Sands1df98592010-02-16 11:11:14 +00001834 if (StoredVal->getType()->isPointerTy())
Chris Lattnerbb6495c2009-09-20 19:03:47 +00001835 MD->invalidateCachedPointerInfo(StoredVal);
Bob Wilson74175c22010-02-22 21:39:41 +00001836 VN.erase(L);
Chris Lattnerb51deb92008-12-05 21:04:20 +00001837 toErase.push_back(L);
Dan Gohmanfe601042010-06-22 15:08:57 +00001838 ++NumGVNLoad;
Chris Lattnerb51deb92008-12-05 21:04:20 +00001839 return true;
1840 }
1841
1842 if (LoadInst *DepLI = dyn_cast<LoadInst>(DepInst)) {
Chris Lattnerbb6495c2009-09-20 19:03:47 +00001843 Value *AvailableVal = DepLI;
1844
1845 // The loads are of a must-aliased pointer, but they may not actually have
1846 // the same type. See if we know how to reuse the previously loaded value
1847 // (depending on its type).
1848 const TargetData *TD = 0;
Chris Lattnera52fce42009-10-21 04:11:19 +00001849 if (DepLI->getType() != L->getType()) {
1850 if ((TD = getAnalysisIfAvailable<TargetData>())) {
1851 AvailableVal = CoerceAvailableValueToLoadType(DepLI, L->getType(), L,*TD);
1852 if (AvailableVal == 0)
1853 return false;
Chris Lattnerbb6495c2009-09-20 19:03:47 +00001854
David Greenebf7f78e2010-01-05 01:27:17 +00001855 DEBUG(dbgs() << "GVN COERCED LOAD:\n" << *DepLI << "\n" << *AvailableVal
Chris Lattnera52fce42009-10-21 04:11:19 +00001856 << "\n" << *L << "\n\n\n");
1857 }
1858 else
1859 return false;
Chris Lattnerbb6495c2009-09-20 19:03:47 +00001860 }
1861
Chris Lattnerb51deb92008-12-05 21:04:20 +00001862 // Remove it!
Chris Lattnerbb6495c2009-09-20 19:03:47 +00001863 L->replaceAllUsesWith(AvailableVal);
Duncan Sands1df98592010-02-16 11:11:14 +00001864 if (DepLI->getType()->isPointerTy())
Chris Lattnerbc99be12008-12-09 22:06:23 +00001865 MD->invalidateCachedPointerInfo(DepLI);
Bob Wilson74175c22010-02-22 21:39:41 +00001866 VN.erase(L);
Chris Lattnerb51deb92008-12-05 21:04:20 +00001867 toErase.push_back(L);
Dan Gohmanfe601042010-06-22 15:08:57 +00001868 ++NumGVNLoad;
Chris Lattnerb51deb92008-12-05 21:04:20 +00001869 return true;
1870 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001871
Chris Lattner237a8282008-11-30 01:39:32 +00001872 // If this load really doesn't depend on anything, then we must be loading an
1873 // undef value. This can happen when loading for a fresh allocation with no
1874 // intervening stores, for example.
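  // Illustrative example (added annotation, not in the original source):
  //   %p = alloca i32
  //   %v = load i32* %p
  // Nothing has been stored to %p, so %v is replaced with undef below.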
Victor Hernandez7b929da2009-10-23 21:09:37 +00001875 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst)) {
Owen Anderson9e9a0d52009-07-30 23:03:37 +00001876 L->replaceAllUsesWith(UndefValue::get(L->getType()));
Bob Wilson74175c22010-02-22 21:39:41 +00001877 VN.erase(L);
Chris Lattner237a8282008-11-30 01:39:32 +00001878 toErase.push_back(L);
Dan Gohmanfe601042010-06-22 15:08:57 +00001879 ++NumGVNLoad;
Chris Lattnerb51deb92008-12-05 21:04:20 +00001880 return true;
Eli Friedmanb6c36e42008-02-12 12:08:14 +00001881 }
Owen Andersonb62f7922009-10-28 07:05:35 +00001882
Owen Anderson9ff5a232009-12-02 07:35:19 +00001883 // If this load occurs right after a lifetime begin,
Owen Andersonb62f7922009-10-28 07:05:35 +00001884 // then the loaded value is undefined.
1885 if (IntrinsicInst* II = dyn_cast<IntrinsicInst>(DepInst)) {
Owen Anderson9ff5a232009-12-02 07:35:19 +00001886 if (II->getIntrinsicID() == Intrinsic::lifetime_start) {
Owen Andersonb62f7922009-10-28 07:05:35 +00001887 L->replaceAllUsesWith(UndefValue::get(L->getType()));
Bob Wilson74175c22010-02-22 21:39:41 +00001888 VN.erase(L);
Owen Andersonb62f7922009-10-28 07:05:35 +00001889 toErase.push_back(L);
Dan Gohmanfe601042010-06-22 15:08:57 +00001890 ++NumGVNLoad;
Owen Andersonb62f7922009-10-28 07:05:35 +00001891 return true;
1892 }
1893 }
Eli Friedmanb6c36e42008-02-12 12:08:14 +00001894
Chris Lattnerb51deb92008-12-05 21:04:20 +00001895 return false;
Owen Anderson1ad2cb72007-07-24 17:55:58 +00001896}
1897
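/// lookupNumber - (Added annotation, not in the original source.) Walk the
/// chain of ValueNumberScopes starting at BB's scope, returning the first
/// value recorded for value number 'num', or null if none is available.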
Chris Lattnerb2412a82009-09-21 02:42:51 +00001898Value *GVN::lookupNumber(BasicBlock *BB, uint32_t num) {
Owen Andersonb70a5712008-06-23 17:49:45 +00001899 DenseMap<BasicBlock*, ValueNumberScope*>::iterator I = localAvail.find(BB);
1900 if (I == localAvail.end())
1901 return 0;
Daniel Dunbara279bc32009-09-20 02:20:51 +00001902
Chris Lattnerb2412a82009-09-21 02:42:51 +00001903 ValueNumberScope *Locals = I->second;
1904 while (Locals) {
1905 DenseMap<uint32_t, Value*>::iterator I = Locals->table.find(num);
1906 if (I != Locals->table.end())
Owen Anderson6fafe842008-06-20 01:15:47 +00001907 return I->second;
Chris Lattnerb2412a82009-09-21 02:42:51 +00001908 Locals = Locals->parent;
Owen Anderson6fafe842008-06-20 01:15:47 +00001909 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001910
Owen Anderson6fafe842008-06-20 01:15:47 +00001911 return 0;
1912}
1913
Owen Anderson255dafc2008-12-15 02:03:00 +00001914
Owen Anderson36057c72007-08-14 18:16:29 +00001915/// processInstruction - When calculating availability, handle an instruction
Owen Anderson1ad2cb72007-07-24 17:55:58 +00001916/// by inserting it into the appropriate sets.
Owen Andersonaf4240a2008-06-12 19:25:32 +00001917bool GVN::processInstruction(Instruction *I,
Chris Lattner8e1e95c2008-03-21 22:01:16 +00001918 SmallVectorImpl<Instruction*> &toErase) {
Devang Patelbe905e22010-02-11 00:20:49 +00001919 // Ignore dbg info intrinsics.
1920 if (isa<DbgInfoIntrinsic>(I))
1921 return false;
1922
Chris Lattnerb2412a82009-09-21 02:42:51 +00001923 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
1924 bool Changed = processLoad(LI, toErase);
Daniel Dunbara279bc32009-09-20 02:20:51 +00001925
Chris Lattnerb2412a82009-09-21 02:42:51 +00001926 if (!Changed) {
1927 unsigned Num = VN.lookup_or_add(LI);
1928 localAvail[I->getParent()]->table.insert(std::make_pair(Num, LI));
Owen Andersonb2303722008-06-18 21:41:49 +00001929 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001930
Chris Lattnerb2412a82009-09-21 02:42:51 +00001931 return Changed;
Owen Andersonb2303722008-06-18 21:41:49 +00001932 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001933
Chris Lattnerb2412a82009-09-21 02:42:51 +00001934 uint32_t NextNum = VN.getNextUnusedValueNumber();
1935 unsigned Num = VN.lookup_or_add(I);
Daniel Dunbara279bc32009-09-20 02:20:51 +00001936
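  // Descriptive note (added annotation, not in the original source): for a
  // conditional branch on a non-constant condition, the code below records in
  // each successor that has this block as its single predecessor that the
  // condition's value number maps to 'true' (true successor) or 'false'
  // (false successor), so later queries can fold uses of the condition.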
Chris Lattnerb2412a82009-09-21 02:42:51 +00001937 if (BranchInst *BI = dyn_cast<BranchInst>(I)) {
1938 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbara279bc32009-09-20 02:20:51 +00001939
Owen Andersone8a290f2009-04-01 23:53:49 +00001940 if (!BI->isConditional() || isa<Constant>(BI->getCondition()))
1941 return false;
Daniel Dunbara279bc32009-09-20 02:20:51 +00001942
Chris Lattnerb2412a82009-09-21 02:42:51 +00001943 Value *BranchCond = BI->getCondition();
1944 uint32_t CondVN = VN.lookup_or_add(BranchCond);
Daniel Dunbara279bc32009-09-20 02:20:51 +00001945
Chris Lattnerb2412a82009-09-21 02:42:51 +00001946 BasicBlock *TrueSucc = BI->getSuccessor(0);
1947 BasicBlock *FalseSucc = BI->getSuccessor(1);
Daniel Dunbara279bc32009-09-20 02:20:51 +00001948
Chris Lattnerb2412a82009-09-21 02:42:51 +00001949 if (TrueSucc->getSinglePredecessor())
1950 localAvail[TrueSucc]->table[CondVN] =
1951 ConstantInt::getTrue(TrueSucc->getContext());
1952 if (FalseSucc->getSinglePredecessor())
1953 localAvail[FalseSucc]->table[CondVN] =
1954 ConstantInt::getFalse(TrueSucc->getContext());
Owen Andersone8a290f2009-04-01 23:53:49 +00001955
1956 return false;
Daniel Dunbara279bc32009-09-20 02:20:51 +00001957
Owen Andersone5ffa902008-04-07 09:59:07 +00001958 // Allocations are always uniquely numbered, so we can save time and memory
Daniel Dunbara279bc32009-09-20 02:20:51 +00001959 // by fast failing them.
Victor Hernandez7b929da2009-10-23 21:09:37 +00001960 } else if (isa<AllocaInst>(I) || isa<TerminatorInst>(I)) {
Chris Lattnerb2412a82009-09-21 02:42:51 +00001961 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Andersone5ffa902008-04-07 09:59:07 +00001962 return false;
Owen Andersonb2303722008-06-18 21:41:49 +00001963 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001964
Owen Anderson62bc33c2007-08-16 22:02:55 +00001965 // Collapse PHI nodes
Owen Anderson31f49672007-08-14 18:33:27 +00001966 if (PHINode* p = dyn_cast<PHINode>(I)) {
Chris Lattnerb2412a82009-09-21 02:42:51 +00001967 Value *constVal = CollapsePhi(p);
Daniel Dunbara279bc32009-09-20 02:20:51 +00001968
Owen Anderson31f49672007-08-14 18:33:27 +00001969 if (constVal) {
Owen Anderson1defe2d2007-08-16 22:51:56 +00001970 p->replaceAllUsesWith(constVal);
Duncan Sands1df98592010-02-16 11:11:14 +00001971 if (MD && constVal->getType()->isPointerTy())
Chris Lattnerbc99be12008-12-09 22:06:23 +00001972 MD->invalidateCachedPointerInfo(constVal);
Owen Andersonae53c932008-12-23 00:49:51 +00001973 VN.erase(p);
Daniel Dunbara279bc32009-09-20 02:20:51 +00001974
Owen Anderson1defe2d2007-08-16 22:51:56 +00001975 toErase.push_back(p);
Owen Andersonb2303722008-06-18 21:41:49 +00001976 } else {
Chris Lattnerb2412a82009-09-21 02:42:51 +00001977 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Anderson31f49672007-08-14 18:33:27 +00001978 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00001979
Owen Anderson0ae33ef2008-07-03 17:44:33 +00001980 // If the number we were assigned was a brand new VN, then we don't
1981 // need to do a lookup to see if the number already exists
1982 // somewhere in the domtree: it can't!
Chris Lattnerb2412a82009-09-21 02:42:51 +00001983 } else if (Num == NextNum) {
1984 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbara279bc32009-09-20 02:20:51 +00001985
Owen Anderson255dafc2008-12-15 02:03:00 +00001986 // Perform fast-path value-number based elimination of values inherited from
1987 // dominators.
Chris Lattnerb2412a82009-09-21 02:42:51 +00001988 } else if (Value *repl = lookupNumber(I->getParent(), Num)) {
Owen Anderson5fc4aba2007-12-08 01:37:09 +00001989 // Remove it!
Owen Andersonbf7d0bc2007-07-31 23:27:13 +00001990 VN.erase(I);
Owen Anderson1ad2cb72007-07-24 17:55:58 +00001991 I->replaceAllUsesWith(repl);
Duncan Sands1df98592010-02-16 11:11:14 +00001992 if (MD && repl->getType()->isPointerTy())
Chris Lattnerbc99be12008-12-09 22:06:23 +00001993 MD->invalidateCachedPointerInfo(repl);
Owen Anderson1ad2cb72007-07-24 17:55:58 +00001994 toErase.push_back(I);
1995 return true;
Owen Anderson255dafc2008-12-15 02:03:00 +00001996
Owen Anderson0ae33ef2008-07-03 17:44:33 +00001997 } else {
Chris Lattnerb2412a82009-09-21 02:42:51 +00001998 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Anderson1ad2cb72007-07-24 17:55:58 +00001999 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00002000
Owen Anderson1ad2cb72007-07-24 17:55:58 +00002001 return false;
2002}
2003
Bill Wendling30788b82008-12-22 22:32:22 +00002004/// runOnFunction - This is the main transformation entry point for a function.
Owen Anderson3e75a422007-08-14 18:04:11 +00002005bool GVN::runOnFunction(Function& F) {
Dan Gohman4ec01b22009-11-14 02:27:51 +00002006 if (!NoLoads)
2007 MD = &getAnalysis<MemoryDependenceAnalysis>();
Chris Lattner663e4412008-12-01 00:40:32 +00002008 DT = &getAnalysis<DominatorTree>();
Owen Andersona472c4a2008-05-12 20:15:55 +00002009 VN.setAliasAnalysis(&getAnalysis<AliasAnalysis>());
Chris Lattner663e4412008-12-01 00:40:32 +00002010 VN.setMemDep(MD);
2011 VN.setDomTree(DT);
Daniel Dunbara279bc32009-09-20 02:20:51 +00002012
Chris Lattnerb2412a82009-09-21 02:42:51 +00002013 bool Changed = false;
2014 bool ShouldContinue = true;
Daniel Dunbara279bc32009-09-20 02:20:51 +00002015
Owen Anderson5d0af032008-07-16 17:52:31 +00002016 // Merge unconditional branches, allowing PRE to catch more
2017 // optimization opportunities.
2018 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ) {
Chris Lattnerb2412a82009-09-21 02:42:51 +00002019 BasicBlock *BB = FI;
Owen Anderson5d0af032008-07-16 17:52:31 +00002020 ++FI;
Owen Andersonb31b06d2008-07-17 00:01:40 +00002021 bool removedBlock = MergeBlockIntoPredecessor(BB, this);
Dan Gohmanfe601042010-06-22 15:08:57 +00002022 if (removedBlock) ++NumGVNBlocks;
Daniel Dunbara279bc32009-09-20 02:20:51 +00002023
Chris Lattnerb2412a82009-09-21 02:42:51 +00002024 Changed |= removedBlock;
Owen Anderson5d0af032008-07-16 17:52:31 +00002025 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00002026
Chris Lattnerae199312008-12-09 19:21:47 +00002027 unsigned Iteration = 0;
Daniel Dunbara279bc32009-09-20 02:20:51 +00002028
Chris Lattnerb2412a82009-09-21 02:42:51 +00002029 while (ShouldContinue) {
David Greenebf7f78e2010-01-05 01:27:17 +00002030 DEBUG(dbgs() << "GVN iteration: " << Iteration << "\n");
Chris Lattnerb2412a82009-09-21 02:42:51 +00002031 ShouldContinue = iterateOnFunction(F);
Bob Wilson484d4a32010-02-16 19:51:59 +00002032 if (splitCriticalEdges())
2033 ShouldContinue = true;
Chris Lattnerb2412a82009-09-21 02:42:51 +00002034 Changed |= ShouldContinue;
Chris Lattnerae199312008-12-09 19:21:47 +00002035 ++Iteration;
Owen Anderson3e75a422007-08-14 18:04:11 +00002036 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00002037
Owen Andersone98c54c2008-07-18 18:03:38 +00002038 if (EnablePRE) {
Owen Anderson0c7f91c2008-09-03 23:06:07 +00002039 bool PREChanged = true;
2040 while (PREChanged) {
2041 PREChanged = performPRE(F);
Chris Lattnerb2412a82009-09-21 02:42:51 +00002042 Changed |= PREChanged;
Owen Anderson0c7f91c2008-09-03 23:06:07 +00002043 }
Owen Andersone98c54c2008-07-18 18:03:38 +00002044 }
Chris Lattnerae199312008-12-09 19:21:47 +00002045 // FIXME: Should perform GVN again after PRE does something. PRE can move
2046 // computations into blocks where they become fully redundant. Note that
2047 // we can't do this until PRE's critical edge splitting updates memdep.
2048 // Actually, when this happens, we should just fully integrate PRE into GVN.
Nuno Lopes7cdd9ee2008-10-10 16:25:50 +00002049
2050 cleanupGlobalSets();
2051
Chris Lattnerb2412a82009-09-21 02:42:51 +00002052 return Changed;
Owen Anderson3e75a422007-08-14 18:04:11 +00002053}
2054
2055
Chris Lattnerb2412a82009-09-21 02:42:51 +00002056bool GVN::processBlock(BasicBlock *BB) {
Chris Lattnerae199312008-12-09 19:21:47 +00002057 // FIXME: Kill off toErase by erasing eagerly in a helper function (and
2058 // incrementing BI before processing an instruction).
Owen Andersonaf4240a2008-06-12 19:25:32 +00002059 SmallVector<Instruction*, 8> toErase;
Chris Lattnerb2412a82009-09-21 02:42:51 +00002060 bool ChangedFunction = false;
Daniel Dunbara279bc32009-09-20 02:20:51 +00002061
Owen Andersonaf4240a2008-06-12 19:25:32 +00002062 for (BasicBlock::iterator BI = BB->begin(), BE = BB->end();
2063 BI != BE;) {
Chris Lattnerb2412a82009-09-21 02:42:51 +00002064 ChangedFunction |= processInstruction(BI, toErase);
Owen Andersonaf4240a2008-06-12 19:25:32 +00002065 if (toErase.empty()) {
2066 ++BI;
2067 continue;
2068 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00002069
Owen Andersonaf4240a2008-06-12 19:25:32 +00002070 // If we need some instructions deleted, do it now.
2071 NumGVNInstr += toErase.size();
Daniel Dunbara279bc32009-09-20 02:20:51 +00002072
Owen Andersonaf4240a2008-06-12 19:25:32 +00002073 // Avoid iterator invalidation.
2074 bool AtStart = BI == BB->begin();
2075 if (!AtStart)
2076 --BI;
2077
2078 for (SmallVector<Instruction*, 4>::iterator I = toErase.begin(),
Chris Lattner663e4412008-12-01 00:40:32 +00002079 E = toErase.end(); I != E; ++I) {
David Greenebf7f78e2010-01-05 01:27:17 +00002080 DEBUG(dbgs() << "GVN removed: " << **I << '\n');
Dan Gohman4ec01b22009-11-14 02:27:51 +00002081 if (MD) MD->removeInstruction(*I);
Owen Andersonaf4240a2008-06-12 19:25:32 +00002082 (*I)->eraseFromParent();
Bill Wendlingec40d502008-12-22 21:57:30 +00002083 DEBUG(verifyRemoved(*I));
Chris Lattner663e4412008-12-01 00:40:32 +00002084 }
Chris Lattnerae199312008-12-09 19:21:47 +00002085 toErase.clear();
Owen Andersonaf4240a2008-06-12 19:25:32 +00002086
2087 if (AtStart)
2088 BI = BB->begin();
2089 else
2090 ++BI;
Owen Andersonaf4240a2008-06-12 19:25:32 +00002091 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00002092
Chris Lattnerb2412a82009-09-21 02:42:51 +00002093 return ChangedFunction;
Owen Andersonaf4240a2008-06-12 19:25:32 +00002094}
2095
Owen Andersonb2303722008-06-18 21:41:49 +00002096/// performPRE - Perform a purely local form of PRE that looks for diamond
2097/// control flow patterns and attempts to perform simple PRE at the join point.
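///
/// Illustrative example (hand-written IR, not taken from this file):
///
///   Pred1:
///     %x = add i32 %a, %b
///     br label %Join
///   Pred2:                        ; no equivalent add here
///     br label %Join
///   Join:
///     %y = add i32 %a, %b         ; partially redundant with %x
///
/// PRE clones the add into Pred2 and replaces %y with a PHI of %x and the
/// clone, so no path through Join evaluates the add more than once.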
Chris Lattnerfb6e7012009-10-31 22:11:15 +00002098bool GVN::performPRE(Function &F) {
Chris Lattnerd0f5bfc2008-12-01 07:35:54 +00002099 bool Changed = false;
Chris Lattner09713792008-12-01 07:29:03 +00002100 DenseMap<BasicBlock*, Value*> predMap;
Owen Andersonb2303722008-06-18 21:41:49 +00002101 for (df_iterator<BasicBlock*> DI = df_begin(&F.getEntryBlock()),
2102 DE = df_end(&F.getEntryBlock()); DI != DE; ++DI) {
Chris Lattnerb2412a82009-09-21 02:42:51 +00002103 BasicBlock *CurrentBlock = *DI;
Daniel Dunbara279bc32009-09-20 02:20:51 +00002104
Owen Andersonb2303722008-06-18 21:41:49 +00002105 // Nothing to PRE in the entry block.
2106 if (CurrentBlock == &F.getEntryBlock()) continue;
Daniel Dunbara279bc32009-09-20 02:20:51 +00002107
Owen Andersonb2303722008-06-18 21:41:49 +00002108 for (BasicBlock::iterator BI = CurrentBlock->begin(),
2109 BE = CurrentBlock->end(); BI != BE; ) {
Chris Lattnerd0f5bfc2008-12-01 07:35:54 +00002110 Instruction *CurInst = BI++;
Duncan Sands7af1c782009-05-06 06:49:50 +00002111
Victor Hernandez7b929da2009-10-23 21:09:37 +00002112 if (isa<AllocaInst>(CurInst) ||
Victor Hernandez83d63912009-09-18 22:35:49 +00002113 isa<TerminatorInst>(CurInst) || isa<PHINode>(CurInst) ||
Devang Patel9674d152009-10-14 17:29:00 +00002114 CurInst->getType()->isVoidTy() ||
Duncan Sands7af1c782009-05-06 06:49:50 +00002115 CurInst->mayReadFromMemory() || CurInst->mayHaveSideEffects() ||
John Criswell090c0a22009-03-10 15:04:53 +00002116 isa<DbgInfoIntrinsic>(CurInst))
Chris Lattnerd0f5bfc2008-12-01 07:35:54 +00002117 continue;
Owen Anderson5015b342010-08-07 00:20:35 +00002118
2119 // We don't currently value number ANY inline asm calls.
2120 if (CallInst *CallI = dyn_cast<CallInst>(CurInst))
2121 if (CallI->isInlineAsm())
2122 continue;
Duncan Sands7af1c782009-05-06 06:49:50 +00002123
Chris Lattnerb2412a82009-09-21 02:42:51 +00002124 uint32_t ValNo = VN.lookup(CurInst);
Daniel Dunbara279bc32009-09-20 02:20:51 +00002125
Owen Andersonb2303722008-06-18 21:41:49 +00002126 // Look at the predecessors for PRE opportunities. We're
2127 // only trying to solve the basic diamond case, where
2128 // a value is computed in the successor and one predecessor,
2129 // but not the other. We also explicitly disallow cases
2130 // where the successor is its own predecessor, because they're
2131 // more complicated to get right.
Chris Lattnerb2412a82009-09-21 02:42:51 +00002132 unsigned NumWith = 0;
2133 unsigned NumWithout = 0;
2134 BasicBlock *PREPred = 0;
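// NumWithout == 2 is used as a sentinel meaning "give up on this
// instruction": it makes the NumWithout != 1 check below fail.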
Chris Lattner09713792008-12-01 07:29:03 +00002135 predMap.clear();
2136
Owen Andersonb2303722008-06-18 21:41:49 +00002137 for (pred_iterator PI = pred_begin(CurrentBlock),
2138 PE = pred_end(CurrentBlock); PI != PE; ++PI) {
Gabor Greif08149852010-07-09 14:36:49 +00002139 BasicBlock *P = *PI;
Owen Andersonb2303722008-06-18 21:41:49 +00002140 // We're not interested in PRE where the block is its
Bob Wilsone7b635f2010-02-03 00:33:21 +00002141 // own predecessor, or in blocks with predecessors
Owen Anderson6fafe842008-06-20 01:15:47 +00002142 // that are not reachable.
Gabor Greif08149852010-07-09 14:36:49 +00002143 if (P == CurrentBlock) {
Chris Lattnerb2412a82009-09-21 02:42:51 +00002144 NumWithout = 2;
Owen Anderson6fafe842008-06-20 01:15:47 +00002145 break;
Gabor Greif08149852010-07-09 14:36:49 +00002146 } else if (!localAvail.count(P)) {
Chris Lattnerb2412a82009-09-21 02:42:51 +00002147 NumWithout = 2;
Owen Anderson6fafe842008-06-20 01:15:47 +00002148 break;
2149 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00002150
2151 DenseMap<uint32_t, Value*>::iterator predV =
Gabor Greif08149852010-07-09 14:36:49 +00002152 localAvail[P]->table.find(ValNo);
2153 if (predV == localAvail[P]->table.end()) {
2154 PREPred = P;
Dan Gohmanfe601042010-06-22 15:08:57 +00002155 ++NumWithout;
Chris Lattnerd0f5bfc2008-12-01 07:35:54 +00002156 } else if (predV->second == CurInst) {
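// The value recorded for this predecessor is CurInst itself, so there is
// nothing cheaper to reuse from P; give up.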
Chris Lattnerb2412a82009-09-21 02:42:51 +00002157 NumWithout = 2;
Owen Andersonb2303722008-06-18 21:41:49 +00002158 } else {
Gabor Greif08149852010-07-09 14:36:49 +00002159 predMap[P] = predV->second;
Dan Gohmanfe601042010-06-22 15:08:57 +00002160 ++NumWith;
Owen Andersonb2303722008-06-18 21:41:49 +00002161 }
2162 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00002163
Owen Andersonb2303722008-06-18 21:41:49 +00002164 // Don't do PRE when it might increase code size, i.e. when
2165 // we would need to insert instructions in more than one pred.
Chris Lattnerb2412a82009-09-21 02:42:51 +00002166 if (NumWithout != 1 || NumWith == 0)
Owen Andersonb2303722008-06-18 21:41:49 +00002167 continue;
Chris Lattnerfb6e7012009-10-31 22:11:15 +00002168
2169 // Don't do PRE across an indirect branch.
2170 if (isa<IndirectBrInst>(PREPred->getTerminator()))
2171 continue;
Daniel Dunbara279bc32009-09-20 02:20:51 +00002172
Owen Anderson5c274ee2008-06-19 19:54:19 +00002173 // We can't do PRE safely on a critical edge, so instead we schedule
2174 // the edge to be split and perform the PRE the next time we iterate
2175 // on the function.
Bob Wilsonae23daf2010-02-16 21:06:42 +00002176 unsigned SuccNum = GetSuccessorNumber(PREPred, CurrentBlock);
Chris Lattnerb2412a82009-09-21 02:42:51 +00002177 if (isCriticalEdge(PREPred->getTerminator(), SuccNum)) {
2178 toSplit.push_back(std::make_pair(PREPred->getTerminator(), SuccNum));
Owen Anderson5c274ee2008-06-19 19:54:19 +00002179 continue;
2180 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00002181
Bob Wilsone7b635f2010-02-03 00:33:21 +00002182 // Instantiate the expression in the predecessor that lacked it.
Owen Andersonb2303722008-06-18 21:41:49 +00002183 // Because we are going top-down through the block, all value numbers
2184 // will be available in the predecessor by the time we need them. Any
Bob Wilsone7b635f2010-02-03 00:33:21 +00002185 // that weren't originally present will have been instantiated earlier
Owen Andersonb2303722008-06-18 21:41:49 +00002186 // in this loop.
Nick Lewycky67760642009-09-27 07:38:41 +00002187 Instruction *PREInstr = CurInst->clone();
Owen Andersonb2303722008-06-18 21:41:49 +00002188 bool success = true;
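// Rewrite the clone's operands to the equivalent values already available
// in PREPred; if any operand has no available equivalent, bail out below.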
Chris Lattnerd0f5bfc2008-12-01 07:35:54 +00002189 for (unsigned i = 0, e = CurInst->getNumOperands(); i != e; ++i) {
2190 Value *Op = PREInstr->getOperand(i);
2191 if (isa<Argument>(Op) || isa<Constant>(Op) || isa<GlobalValue>(Op))
2192 continue;
Daniel Dunbara279bc32009-09-20 02:20:51 +00002193
Chris Lattnerd0f5bfc2008-12-01 07:35:54 +00002194 if (Value *V = lookupNumber(PREPred, VN.lookup(Op))) {
2195 PREInstr->setOperand(i, V);
2196 } else {
2197 success = false;
2198 break;
Owen Andersonc45996b2008-07-11 20:05:13 +00002199 }
Owen Andersonb2303722008-06-18 21:41:49 +00002200 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00002201
Owen Andersonb2303722008-06-18 21:41:49 +00002202 // Fail out if we encounter an operand that is not available in
Daniel Dunbara279bc32009-09-20 02:20:51 +00002203 // the PRE predecessor. This is typically because of loads which
Owen Andersonb2303722008-06-18 21:41:49 +00002204 // are not value numbered precisely.
2205 if (!success) {
2206 delete PREInstr;
Bill Wendling70ded192008-12-22 22:14:07 +00002207 DEBUG(verifyRemoved(PREInstr));
Owen Andersonb2303722008-06-18 21:41:49 +00002208 continue;
2209 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00002210
Owen Andersonb2303722008-06-18 21:41:49 +00002211 PREInstr->insertBefore(PREPred->getTerminator());
Chris Lattnerd0f5bfc2008-12-01 07:35:54 +00002212 PREInstr->setName(CurInst->getName() + ".pre");
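// Record the clone as the value that will flow into the PHI from PREPred.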
Owen Anderson6fafe842008-06-20 01:15:47 +00002213 predMap[PREPred] = PREInstr;
Chris Lattnerb2412a82009-09-21 02:42:51 +00002214 VN.add(PREInstr, ValNo);
Dan Gohmanfe601042010-06-22 15:08:57 +00002215 ++NumGVNPRE;
Daniel Dunbara279bc32009-09-20 02:20:51 +00002216
Owen Andersonb2303722008-06-18 21:41:49 +00002217 // Update the availability map to include the new instruction.
Chris Lattnerb2412a82009-09-21 02:42:51 +00002218 localAvail[PREPred]->table.insert(std::make_pair(ValNo, PREInstr));
Daniel Dunbara279bc32009-09-20 02:20:51 +00002219
Owen Andersonb2303722008-06-18 21:41:49 +00002220 // Create a PHI to make the value available in this block.
Chris Lattnerd0f5bfc2008-12-01 07:35:54 +00002221 PHINode* Phi = PHINode::Create(CurInst->getType(),
2222 CurInst->getName() + ".pre-phi",
Owen Andersonb2303722008-06-18 21:41:49 +00002223 CurrentBlock->begin());
2224 for (pred_iterator PI = pred_begin(CurrentBlock),
Gabor Greif1d3ae022010-07-09 14:48:08 +00002225 PE = pred_end(CurrentBlock); PI != PE; ++PI) {
2226 BasicBlock *P = *PI;
2227 Phi->addIncoming(predMap[P], P);
2228 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00002229
Chris Lattnerb2412a82009-09-21 02:42:51 +00002230 VN.add(Phi, ValNo);
2231 localAvail[CurrentBlock]->table[ValNo] = Phi;
Daniel Dunbara279bc32009-09-20 02:20:51 +00002232
Chris Lattnerd0f5bfc2008-12-01 07:35:54 +00002233 CurInst->replaceAllUsesWith(Phi);
Duncan Sands1df98592010-02-16 11:11:14 +00002234 if (MD && Phi->getType()->isPointerTy())
Chris Lattnerbc99be12008-12-09 22:06:23 +00002235 MD->invalidateCachedPointerInfo(Phi);
Chris Lattnerd0f5bfc2008-12-01 07:35:54 +00002236 VN.erase(CurInst);
Daniel Dunbara279bc32009-09-20 02:20:51 +00002237
David Greenebf7f78e2010-01-05 01:27:17 +00002238 DEBUG(dbgs() << "GVN PRE removed: " << *CurInst << '\n');
Dan Gohman4ec01b22009-11-14 02:27:51 +00002239 if (MD) MD->removeInstruction(CurInst);
Chris Lattnerd0f5bfc2008-12-01 07:35:54 +00002240 CurInst->eraseFromParent();
Bill Wendlingec40d502008-12-22 21:57:30 +00002241 DEBUG(verifyRemoved(CurInst));
Chris Lattnerd0f5bfc2008-12-01 07:35:54 +00002242 Changed = true;
Owen Andersonb2303722008-06-18 21:41:49 +00002243 }
2244 }
Daniel Dunbara279bc32009-09-20 02:20:51 +00002245
Bob Wilson484d4a32010-02-16 19:51:59 +00002246 if (splitCriticalEdges())
2247 Changed = true;
Daniel Dunbara279bc32009-09-20 02:20:51 +00002248
Bob Wilson484d4a32010-02-16 19:51:59 +00002249 return Changed;
2250}
2251
2252/// splitCriticalEdges - Split critical edges found during the previous
2253/// iteration that may enable further optimization.
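/// Returns true if any edges were split. Because splitting changes the CFG,
/// memdep's cached predecessor lists (when memdep is available) are
/// invalidated as well.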
2254bool GVN::splitCriticalEdges() {
2255 if (toSplit.empty())
2256 return false;
2257 do {
2258 std::pair<TerminatorInst*, unsigned> Edge = toSplit.pop_back_val();
2259 SplitCriticalEdge(Edge.first, Edge.second, this);
2260 } while (!toSplit.empty());
Evan Cheng19d417c2010-03-01 22:23:12 +00002261 if (MD) MD->invalidateCachedPredecessors();
Bob Wilson484d4a32010-02-16 19:51:59 +00002262 return true;
Owen Andersonb2303722008-06-18 21:41:49 +00002263}
2264
Bill Wendling30788b82008-12-22 22:32:22 +00002265/// iterateOnFunction - Executes one iteration of GVN.
Owen Anderson3e75a422007-08-14 18:04:11 +00002266bool GVN::iterateOnFunction(Function &F) {
Nuno Lopes7cdd9ee2008-10-10 16:25:50 +00002267 cleanupGlobalSets();
Chris Lattner2e607012008-03-21 21:33:23 +00002268
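// Build a value-number scope for each block, chaining it to the scope of the
// block's immediate dominator so availability lookups can walk up the
// dominator tree.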
Owen Andersone8a290f2009-04-01 23:53:49 +00002269 for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
2270 DE = df_end(DT->getRootNode()); DI != DE; ++DI) {
2271 if (DI->getIDom())
2272 localAvail[DI->getBlock()] =
2273 new ValueNumberScope(localAvail[DI->getIDom()->getBlock()]);
2274 else
2275 localAvail[DI->getBlock()] = new ValueNumberScope(0);
2276 }
2277
Owen Anderson1ad2cb72007-07-24 17:55:58 +00002278 // Top-down walk of the dominator tree
Chris Lattnerb2412a82009-09-21 02:42:51 +00002279 bool Changed = false;
Owen Andersonc34d1122008-12-15 03:52:17 +00002280#if 0
2281 // Needed for value numbering with phi construction to work.
Owen Anderson255dafc2008-12-15 02:03:00 +00002282 ReversePostOrderTraversal<Function*> RPOT(&F);
2283 for (ReversePostOrderTraversal<Function*>::rpo_iterator RI = RPOT.begin(),
2284 RE = RPOT.end(); RI != RE; ++RI)
Chris Lattnerb2412a82009-09-21 02:42:51 +00002285 Changed |= processBlock(*RI);
Owen Andersonc34d1122008-12-15 03:52:17 +00002286#else
2287 for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
2288 DE = df_end(DT->getRootNode()); DI != DE; ++DI)
Chris Lattnerb2412a82009-09-21 02:42:51 +00002289 Changed |= processBlock(DI->getBlock());
Owen Andersonc34d1122008-12-15 03:52:17 +00002290#endif
2291
Chris Lattnerb2412a82009-09-21 02:42:51 +00002292 return Changed;
Owen Anderson1ad2cb72007-07-24 17:55:58 +00002293}
Nuno Lopes7cdd9ee2008-10-10 16:25:50 +00002294
2295void GVN::cleanupGlobalSets() {
2296 VN.clear();
Nuno Lopes7cdd9ee2008-10-10 16:25:50 +00002297
2298 for (DenseMap<BasicBlock*, ValueNumberScope*>::iterator
2299 I = localAvail.begin(), E = localAvail.end(); I != E; ++I)
2300 delete I->second;
2301 localAvail.clear();
2302}
Bill Wendling246dbbb2008-12-22 21:36:08 +00002303
2304/// verifyRemoved - Verify that the specified instruction does not occur in our
2305/// internal data structures.
Bill Wendling6d463f22008-12-22 22:28:56 +00002306void GVN::verifyRemoved(const Instruction *Inst) const {
2307 VN.verifyRemoved(Inst);
Bill Wendling70ded192008-12-22 22:14:07 +00002308
Bill Wendling6d463f22008-12-22 22:28:56 +00002309 // Walk through the value number scope to make sure the instruction isn't
2310 // ferreted away in it.
Jeffrey Yasskin81cf4322009-11-10 01:02:17 +00002311 for (DenseMap<BasicBlock*, ValueNumberScope*>::const_iterator
Bill Wendling6d463f22008-12-22 22:28:56 +00002312 I = localAvail.begin(), E = localAvail.end(); I != E; ++I) {
2313 const ValueNumberScope *VNS = I->second;
2314
2315 while (VNS) {
Jeffrey Yasskin81cf4322009-11-10 01:02:17 +00002316 for (DenseMap<uint32_t, Value*>::const_iterator
Bill Wendling6d463f22008-12-22 22:28:56 +00002317 II = VNS->table.begin(), IE = VNS->table.end(); II != IE; ++II) {
2318 assert(II->second != Inst && "Inst still in value numbering scope!");
2319 }
2320
2321 VNS = VNS->parent;
Bill Wendling70ded192008-12-22 22:14:07 +00002322 }
2323 }
Bill Wendling246dbbb2008-12-22 21:36:08 +00002324}