//===- GVN.cpp - Eliminate redundant values and loads ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs global value numbering to eliminate fully redundant
// instructions. It also performs simple dead load elimination.
//
// Note that this pass does the value numbering itself; it does not use the
// ValueNumbering analysis passes.
//
//===----------------------------------------------------------------------===//
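//
// For illustration (an example, not code from this pass): in
//
//     %x = add i32 %a, %b
//     %y = add i32 %a, %b
//
// both adds receive the same value number, so the second one is fully
// redundant and can be replaced by the first.  Dead load elimination
// similarly replaces a load whose value is already available from an earlier
// load or store of the same address.
//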

#define DEBUG_TYPE "gvn"
#include "llvm/Transforms/Scalar.h"
#include "llvm/BasicBlock.h"
#include "llvm/Constants.h"
#include "llvm/DerivedTypes.h"
#include "llvm/GlobalVariable.h"
#include "llvm/Function.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/LLVMContext.h"
#include "llvm/Operator.h"
#include "llvm/Value.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Analysis/PHITransAddr.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/GetElementPtrTypeIterator.h"
#include "llvm/Support/IRBuilder.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/SSAUpdater.h"
using namespace llvm;

STATISTIC(NumGVNInstr,  "Number of instructions deleted");
STATISTIC(NumGVNLoad,   "Number of loads deleted");
STATISTIC(NumGVNPRE,    "Number of instructions PRE'd");
STATISTIC(NumGVNBlocks, "Number of blocks merged");
STATISTIC(NumPRELoad,   "Number of loads PRE'd");

static cl::opt<bool> EnablePRE("enable-pre",
                               cl::init(true), cl::Hidden);
static cl::opt<bool> EnableLoadPRE("enable-load-pre", cl::init(true));
static cl::opt<bool> EnableFullLoadPRE("enable-full-load-pre", cl::init(false));

//===----------------------------------------------------------------------===//
//                         ValueTable Class
//===----------------------------------------------------------------------===//

/// This class holds the mapping between values and value numbers. It is used
/// as an efficient mechanism to determine the expression-wise equivalence of
/// two values.
namespace {
  struct Expression {
    enum ExpressionOpcode {
      ADD = Instruction::Add,
      FADD = Instruction::FAdd,
      SUB = Instruction::Sub,
      FSUB = Instruction::FSub,
      MUL = Instruction::Mul,
      FMUL = Instruction::FMul,
      UDIV = Instruction::UDiv,
      SDIV = Instruction::SDiv,
      FDIV = Instruction::FDiv,
      UREM = Instruction::URem,
      SREM = Instruction::SRem,
      FREM = Instruction::FRem,
      SHL = Instruction::Shl,
      LSHR = Instruction::LShr,
      ASHR = Instruction::AShr,
      AND = Instruction::And,
      OR = Instruction::Or,
      XOR = Instruction::Xor,
      TRUNC = Instruction::Trunc,
      ZEXT = Instruction::ZExt,
      SEXT = Instruction::SExt,
      FPTOUI = Instruction::FPToUI,
      FPTOSI = Instruction::FPToSI,
      UITOFP = Instruction::UIToFP,
      SITOFP = Instruction::SIToFP,
      FPTRUNC = Instruction::FPTrunc,
      FPEXT = Instruction::FPExt,
      PTRTOINT = Instruction::PtrToInt,
      INTTOPTR = Instruction::IntToPtr,
      BITCAST = Instruction::BitCast,
      ICMPEQ, ICMPNE, ICMPUGT, ICMPUGE, ICMPULT, ICMPULE,
      ICMPSGT, ICMPSGE, ICMPSLT, ICMPSLE, FCMPOEQ,
      FCMPOGT, FCMPOGE, FCMPOLT, FCMPOLE, FCMPONE,
      FCMPORD, FCMPUNO, FCMPUEQ, FCMPUGT, FCMPUGE,
      FCMPULT, FCMPULE, FCMPUNE, EXTRACT, INSERT,
      SHUFFLE, SELECT, GEP, CALL, CONSTANT,
      INSERTVALUE, EXTRACTVALUE, EMPTY, TOMBSTONE };

    ExpressionOpcode opcode;
    const Type* type;
    SmallVector<uint32_t, 4> varargs;
    Value *function;

    Expression() { }
    Expression(ExpressionOpcode o) : opcode(o) { }

    bool operator==(const Expression &other) const {
      if (opcode != other.opcode)
        return false;
      else if (opcode == EMPTY || opcode == TOMBSTONE)
        return true;
      else if (type != other.type)
        return false;
      else if (function != other.function)
        return false;
      else {
        if (varargs.size() != other.varargs.size())
          return false;

        for (size_t i = 0; i < varargs.size(); ++i)
          if (varargs[i] != other.varargs[i])
            return false;

        return true;
      }
    }

    bool operator!=(const Expression &other) const {
      return !(*this == other);
    }
  };
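
  // Note: EMPTY and TOMBSTONE exist only so that Expression can be used as a
  // DenseMap key (see the DenseMapInfo specialization below).  Two keys with
  // either opcode compare equal without looking at the remaining fields,
  // which is why operator== tests for them before comparing type, function,
  // and operands.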

  class ValueTable {
    private:
      DenseMap<Value*, uint32_t> valueNumbering;
      DenseMap<Expression, uint32_t> expressionNumbering;
      AliasAnalysis* AA;
      MemoryDependenceAnalysis* MD;
      DominatorTree* DT;

      uint32_t nextValueNumber;

      Expression::ExpressionOpcode getOpcode(CmpInst* C);
      Expression create_expression(BinaryOperator* BO);
      Expression create_expression(CmpInst* C);
      Expression create_expression(ShuffleVectorInst* V);
      Expression create_expression(ExtractElementInst* C);
      Expression create_expression(InsertElementInst* V);
      Expression create_expression(SelectInst* V);
      Expression create_expression(CastInst* C);
      Expression create_expression(GetElementPtrInst* G);
      Expression create_expression(CallInst* C);
      Expression create_expression(Constant* C);
      Expression create_expression(ExtractValueInst* C);
      Expression create_expression(InsertValueInst* C);

      uint32_t lookup_or_add_call(CallInst* C);
    public:
      ValueTable() : nextValueNumber(1) { }
      uint32_t lookup_or_add(Value *V);
      uint32_t lookup(Value *V) const;
      void add(Value *V, uint32_t num);
      void clear();
      void erase(Value *v);
      unsigned size();
      void setAliasAnalysis(AliasAnalysis* A) { AA = A; }
      AliasAnalysis *getAliasAnalysis() const { return AA; }
      void setMemDep(MemoryDependenceAnalysis* M) { MD = M; }
      void setDomTree(DominatorTree* D) { DT = D; }
      uint32_t getNextUnusedValueNumber() { return nextValueNumber; }
      void verifyRemoved(const Value *) const;
  };
}

namespace llvm {
template <> struct DenseMapInfo<Expression> {
  static inline Expression getEmptyKey() {
    return Expression(Expression::EMPTY);
  }

  static inline Expression getTombstoneKey() {
    return Expression(Expression::TOMBSTONE);
  }

  static unsigned getHashValue(const Expression e) {
    unsigned hash = e.opcode;

    hash = ((unsigned)((uintptr_t)e.type >> 4) ^
            (unsigned)((uintptr_t)e.type >> 9));

    for (SmallVector<uint32_t, 4>::const_iterator I = e.varargs.begin(),
         E = e.varargs.end(); I != E; ++I)
      hash = *I + hash * 37;

    hash = ((unsigned)((uintptr_t)e.function >> 4) ^
            (unsigned)((uintptr_t)e.function >> 9)) +
           hash * 37;

    return hash;
  }
  static bool isEqual(const Expression &LHS, const Expression &RHS) {
    return LHS == RHS;
  }
};

template <>
struct isPodLike<Expression> { static const bool value = true; };

}

//===----------------------------------------------------------------------===//
//                     ValueTable Internal Functions
//===----------------------------------------------------------------------===//

Expression::ExpressionOpcode ValueTable::getOpcode(CmpInst* C) {
  if (isa<ICmpInst>(C)) {
    switch (C->getPredicate()) {
    default:  // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case ICmpInst::ICMP_EQ:  return Expression::ICMPEQ;
    case ICmpInst::ICMP_NE:  return Expression::ICMPNE;
    case ICmpInst::ICMP_UGT: return Expression::ICMPUGT;
    case ICmpInst::ICMP_UGE: return Expression::ICMPUGE;
    case ICmpInst::ICMP_ULT: return Expression::ICMPULT;
    case ICmpInst::ICMP_ULE: return Expression::ICMPULE;
    case ICmpInst::ICMP_SGT: return Expression::ICMPSGT;
    case ICmpInst::ICMP_SGE: return Expression::ICMPSGE;
    case ICmpInst::ICMP_SLT: return Expression::ICMPSLT;
    case ICmpInst::ICMP_SLE: return Expression::ICMPSLE;
    }
  } else {
    switch (C->getPredicate()) {
    default: // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case FCmpInst::FCMP_OEQ: return Expression::FCMPOEQ;
    case FCmpInst::FCMP_OGT: return Expression::FCMPOGT;
    case FCmpInst::FCMP_OGE: return Expression::FCMPOGE;
    case FCmpInst::FCMP_OLT: return Expression::FCMPOLT;
    case FCmpInst::FCMP_OLE: return Expression::FCMPOLE;
    case FCmpInst::FCMP_ONE: return Expression::FCMPONE;
    case FCmpInst::FCMP_ORD: return Expression::FCMPORD;
    case FCmpInst::FCMP_UNO: return Expression::FCMPUNO;
    case FCmpInst::FCMP_UEQ: return Expression::FCMPUEQ;
    case FCmpInst::FCMP_UGT: return Expression::FCMPUGT;
    case FCmpInst::FCMP_UGE: return Expression::FCMPUGE;
    case FCmpInst::FCMP_ULT: return Expression::FCMPULT;
    case FCmpInst::FCMP_ULE: return Expression::FCMPULE;
    case FCmpInst::FCMP_UNE: return Expression::FCMPUNE;
    }
  }
}

Expression ValueTable::create_expression(CallInst* C) {
  Expression e;

  e.type = C->getType();
  e.function = C->getCalledFunction();
  e.opcode = Expression::CALL;

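  // Hash only the call's arguments: operand 0 of a CallInst in this operand
  // layout is the callee, which is already recorded in e.function above, so
  // the loop starts at op_begin()+1.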
  for (CallInst::op_iterator I = C->op_begin()+1, E = C->op_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(BinaryOperator* BO) {
  Expression e;
  e.varargs.push_back(lookup_or_add(BO->getOperand(0)));
  e.varargs.push_back(lookup_or_add(BO->getOperand(1)));
  e.function = 0;
  e.type = BO->getType();
  e.opcode = static_cast<Expression::ExpressionOpcode>(BO->getOpcode());

  return e;
}

Expression ValueTable::create_expression(CmpInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.varargs.push_back(lookup_or_add(C->getOperand(1)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = getOpcode(C);

  return e;
}

Expression ValueTable::create_expression(CastInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = static_cast<Expression::ExpressionOpcode>(C->getOpcode());

  return e;
}

Expression ValueTable::create_expression(ShuffleVectorInst* S) {
  Expression e;

  e.varargs.push_back(lookup_or_add(S->getOperand(0)));
  e.varargs.push_back(lookup_or_add(S->getOperand(1)));
  e.varargs.push_back(lookup_or_add(S->getOperand(2)));
  e.function = 0;
  e.type = S->getType();
  e.opcode = Expression::SHUFFLE;

  return e;
}

Expression ValueTable::create_expression(ExtractElementInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getOperand(0)));
  e.varargs.push_back(lookup_or_add(E->getOperand(1)));
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACT;

  return e;
}

Expression ValueTable::create_expression(InsertElementInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getOperand(0)));
  e.varargs.push_back(lookup_or_add(I->getOperand(1)));
  e.varargs.push_back(lookup_or_add(I->getOperand(2)));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::INSERT;

  return e;
}

Expression ValueTable::create_expression(SelectInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getCondition()));
  e.varargs.push_back(lookup_or_add(I->getTrueValue()));
  e.varargs.push_back(lookup_or_add(I->getFalseValue()));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::SELECT;

  return e;
}

Expression ValueTable::create_expression(GetElementPtrInst* G) {
  Expression e;

  e.varargs.push_back(lookup_or_add(G->getPointerOperand()));
  e.function = 0;
  e.type = G->getType();
  e.opcode = Expression::GEP;

  for (GetElementPtrInst::op_iterator I = G->idx_begin(), E = G->idx_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(ExtractValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  for (ExtractValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACTVALUE;

  return e;
}

Expression ValueTable::create_expression(InsertValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  e.varargs.push_back(lookup_or_add(E->getInsertedValueOperand()));
  for (InsertValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::INSERTVALUE;

  return e;
}

//===----------------------------------------------------------------------===//
//                     ValueTable External Functions
//===----------------------------------------------------------------------===//

/// add - Insert a value into the table with a specified value number.
void ValueTable::add(Value *V, uint32_t num) {
  valueNumbering.insert(std::make_pair(V, num));
}

uint32_t ValueTable::lookup_or_add_call(CallInst* C) {
  if (AA->doesNotAccessMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) e = nextValueNumber++;
    valueNumbering[C] = e;
    return e;
  } else if (AA->onlyReadsMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }
    if (!MD) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }

    MemDepResult local_dep = MD->getDependency(C);

    if (!local_dep.isDef() && !local_dep.isNonLocal()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (local_dep.isDef()) {
      CallInst* local_cdep = cast<CallInst>(local_dep.getInst());

      if (local_cdep->getNumOperands() != C->getNumOperands()) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }

      for (unsigned i = 1; i < C->getNumOperands(); ++i) {
        uint32_t c_vn = lookup_or_add(C->getOperand(i));
        uint32_t cd_vn = lookup_or_add(local_cdep->getOperand(i));
        if (c_vn != cd_vn) {
          valueNumbering[C] = nextValueNumber;
          return nextValueNumber++;
        }
      }

      uint32_t v = lookup_or_add(local_cdep);
      valueNumbering[C] = v;
      return v;
    }

    // Non-local case.
    const MemoryDependenceAnalysis::NonLocalDepInfo &deps =
      MD->getNonLocalCallDependency(CallSite(C));
    // FIXME: call/call dependencies for readonly calls should return def, not
    // clobber!  Move the checking logic to MemDep!
    CallInst* cdep = 0;

    // Check to see if we have a single dominating call instruction that is
    // identical to C.
    for (unsigned i = 0, e = deps.size(); i != e; ++i) {
      const NonLocalDepEntry *I = &deps[i];
      // Ignore non-local dependencies.
      if (I->getResult().isNonLocal())
        continue;

484 // We don't handle non-depedencies. If we already have a call, reject
      // instruction dependencies.
      if (I->getResult().isClobber() || cdep != 0) {
        cdep = 0;
        break;
      }

      CallInst *NonLocalDepCall = dyn_cast<CallInst>(I->getResult().getInst());
      // FIXME: All duplicated with the local case above.
      if (NonLocalDepCall && DT->properlyDominates(I->getBB(), C->getParent())){
        cdep = NonLocalDepCall;
        continue;
      }

      cdep = 0;
      break;
    }

    if (!cdep) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (cdep->getNumOperands() != C->getNumOperands()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }
    for (unsigned i = 1; i < C->getNumOperands(); ++i) {
      uint32_t c_vn = lookup_or_add(C->getOperand(i));
      uint32_t cd_vn = lookup_or_add(cdep->getOperand(i));
      if (c_vn != cd_vn) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }
    }

    uint32_t v = lookup_or_add(cdep);
    valueNumbering[C] = v;
    return v;

  } else {
    valueNumbering[C] = nextValueNumber;
    return nextValueNumber++;
  }
}

/// lookup_or_add - Returns the value number for the specified value, assigning
/// it a new number if it did not have one before.
uint32_t ValueTable::lookup_or_add(Value *V) {
  DenseMap<Value*, uint32_t>::iterator VI = valueNumbering.find(V);
  if (VI != valueNumbering.end())
    return VI->second;

  if (!isa<Instruction>(V)) {
    valueNumbering[V] = nextValueNumber;
    return nextValueNumber++;
  }

  Instruction* I = cast<Instruction>(V);
  Expression exp;
  switch (I->getOpcode()) {
    case Instruction::Call:
      return lookup_or_add_call(cast<CallInst>(I));
    case Instruction::Add:
    case Instruction::FAdd:
    case Instruction::Sub:
    case Instruction::FSub:
    case Instruction::Mul:
    case Instruction::FMul:
    case Instruction::UDiv:
    case Instruction::SDiv:
    case Instruction::FDiv:
    case Instruction::URem:
    case Instruction::SRem:
    case Instruction::FRem:
    case Instruction::Shl:
    case Instruction::LShr:
    case Instruction::AShr:
    case Instruction::And:
    case Instruction::Or :
    case Instruction::Xor:
      exp = create_expression(cast<BinaryOperator>(I));
      break;
    case Instruction::ICmp:
    case Instruction::FCmp:
      exp = create_expression(cast<CmpInst>(I));
      break;
    case Instruction::Trunc:
    case Instruction::ZExt:
    case Instruction::SExt:
    case Instruction::FPToUI:
    case Instruction::FPToSI:
    case Instruction::UIToFP:
    case Instruction::SIToFP:
    case Instruction::FPTrunc:
    case Instruction::FPExt:
    case Instruction::PtrToInt:
    case Instruction::IntToPtr:
    case Instruction::BitCast:
      exp = create_expression(cast<CastInst>(I));
      break;
    case Instruction::Select:
      exp = create_expression(cast<SelectInst>(I));
      break;
    case Instruction::ExtractElement:
      exp = create_expression(cast<ExtractElementInst>(I));
      break;
    case Instruction::InsertElement:
      exp = create_expression(cast<InsertElementInst>(I));
      break;
    case Instruction::ShuffleVector:
      exp = create_expression(cast<ShuffleVectorInst>(I));
      break;
    case Instruction::ExtractValue:
      exp = create_expression(cast<ExtractValueInst>(I));
      break;
    case Instruction::InsertValue:
      exp = create_expression(cast<InsertValueInst>(I));
      break;
    case Instruction::GetElementPtr:
      exp = create_expression(cast<GetElementPtrInst>(I));
      break;
    default:
      valueNumbering[V] = nextValueNumber;
      return nextValueNumber++;
  }

  uint32_t& e = expressionNumbering[exp];
  if (!e) e = nextValueNumber++;
  valueNumbering[V] = e;
  return e;
}
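
// Illustrative notes (not from the original source): values that are not
// instructions (arguments, constants, globals) simply get a fresh number the
// first time they are seen, as do instructions the switch above does not
// model (loads, stores, PHIs, ...).  Only the modeled opcodes can be proven
// equal to one another purely by expression structure.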

/// lookup - Returns the value number of the specified value. Fails if
/// the value has not yet been numbered.
uint32_t ValueTable::lookup(Value *V) const {
  DenseMap<Value*, uint32_t>::const_iterator VI = valueNumbering.find(V);
  assert(VI != valueNumbering.end() && "Value not numbered?");
  return VI->second;
}

/// clear - Remove all entries from the ValueTable
void ValueTable::clear() {
  valueNumbering.clear();
  expressionNumbering.clear();
  nextValueNumber = 1;
}

/// erase - Remove a value from the value numbering
void ValueTable::erase(Value *V) {
  valueNumbering.erase(V);
}

/// verifyRemoved - Verify that the value is removed from all internal data
/// structures.
void ValueTable::verifyRemoved(const Value *V) const {
  for (DenseMap<Value*, uint32_t>::const_iterator
         I = valueNumbering.begin(), E = valueNumbering.end(); I != E; ++I) {
    assert(I->first != V && "Inst still occurs in value numbering map!");
  }
}

//===----------------------------------------------------------------------===//
//                                GVN Pass
//===----------------------------------------------------------------------===//

namespace {
Owen Anderson2a412722008-06-20 01:15:47 +0000652 ValueNumberScope* parent;
653 DenseMap<uint32_t, Value*> table;
Daniel Dunbar3be44e62009-09-20 02:20:51 +0000654
Owen Anderson2a412722008-06-20 01:15:47 +0000655 ValueNumberScope(ValueNumberScope* p) : parent(p) { }
656 };
657}
658
659namespace {
Owen Anderson85c40642007-07-24 17:55:58 +0000660
Chris Lattnerfa2d1ba2009-09-02 06:11:42 +0000661 class GVN : public FunctionPass {
Owen Anderson85c40642007-07-24 17:55:58 +0000662 bool runOnFunction(Function &F);
663 public:
664 static char ID; // Pass identification, replacement for typeid
Dan Gohmanc8d26652009-11-14 02:27:51 +0000665 explicit GVN(bool nopre = false, bool noloads = false)
666 : FunctionPass(&ID), NoPRE(nopre), NoLoads(noloads), MD(0) { }
Owen Anderson85c40642007-07-24 17:55:58 +0000667
668 private:
Evan Chengf036e552009-10-30 20:12:24 +0000669 bool NoPRE;
Dan Gohmanc8d26652009-11-14 02:27:51 +0000670 bool NoLoads;
Chris Lattner02ca4422008-12-01 00:40:32 +0000671 MemoryDependenceAnalysis *MD;
672 DominatorTree *DT;
673
Owen Anderson85c40642007-07-24 17:55:58 +0000674 ValueTable VN;
Owen Anderson2a412722008-06-20 01:15:47 +0000675 DenseMap<BasicBlock*, ValueNumberScope*> localAvail;
Daniel Dunbar3be44e62009-09-20 02:20:51 +0000676
    // This transformation requires dominator info
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<DominatorTree>();
      if (!NoLoads)
        AU.addRequired<MemoryDependenceAnalysis>();
      AU.addRequired<AliasAnalysis>();

      AU.addPreserved<DominatorTree>();
      AU.addPreserved<AliasAnalysis>();
    }

    // Helper functions
    // FIXME: eliminate or document these better
    bool processLoad(LoadInst* L,
                     SmallVectorImpl<Instruction*> &toErase);
    bool processInstruction(Instruction *I,
                            SmallVectorImpl<Instruction*> &toErase);
    bool processNonLocalLoad(LoadInst* L,
                             SmallVectorImpl<Instruction*> &toErase);
    bool processBlock(BasicBlock *BB);
    void dump(DenseMap<uint32_t, Value*>& d);
    bool iterateOnFunction(Function &F);
    Value *CollapsePhi(PHINode* p);
    bool performPRE(Function& F);
    Value *lookupNumber(BasicBlock *BB, uint32_t num);
    void cleanupGlobalSets();
    void verifyRemoved(const Instruction *I) const;
  };

  char GVN::ID = 0;
}

// createGVNPass - The public interface to this file...
FunctionPass *llvm::createGVNPass(bool NoPRE, bool NoLoads) {
  return new GVN(NoPRE, NoLoads);
}

static RegisterPass<GVN> X("gvn",
                           "Global Value Numbering");

void GVN::dump(DenseMap<uint32_t, Value*>& d) {
  errs() << "{\n";
  for (DenseMap<uint32_t, Value*>::iterator I = d.begin(),
       E = d.end(); I != E; ++I) {
    errs() << I->first << "\n";
    I->second->dump();
  }
  errs() << "}\n";
}

static bool isSafeReplacement(PHINode* p, Instruction *inst) {
  if (!isa<PHINode>(inst))
    return true;

  for (Instruction::use_iterator UI = p->use_begin(), E = p->use_end();
       UI != E; ++UI)
    if (PHINode* use_phi = dyn_cast<PHINode>(UI))
      if (use_phi->getParent() == inst->getParent())
        return false;

  return true;
}

Value *GVN::CollapsePhi(PHINode *PN) {
  Value *ConstVal = PN->hasConstantValue(DT);
  if (!ConstVal) return 0;

  Instruction *Inst = dyn_cast<Instruction>(ConstVal);
  if (!Inst)
    return ConstVal;

  if (DT->dominates(Inst, PN))
    if (isSafeReplacement(PN, Inst))
      return Inst;
  return 0;
}

/// IsValueFullyAvailableInBlock - Return true if we can prove that the value
/// we're analyzing is fully available in the specified block.  As we go, keep
/// track of which blocks we know are fully alive in FullyAvailableBlocks.  This
/// map is actually a four-state map with the following values:
///   0) we know the block *is not* fully available.
///   1) we know the block *is* fully available.
///   2) we do not know whether the block is fully available or not, but we are
///      currently speculating that it will be.
///   3) we are speculating for this block and have used that to speculate for
///      other blocks.
static bool IsValueFullyAvailableInBlock(BasicBlock *BB,
                            DenseMap<BasicBlock*, char> &FullyAvailableBlocks) {
  // Optimistically assume that the block is fully available and check to see
  // if we already know about this block in one lookup.
  std::pair<DenseMap<BasicBlock*, char>::iterator, char> IV =
    FullyAvailableBlocks.insert(std::make_pair(BB, 2));

  // If the entry already existed for this block, return the precomputed value.
  if (!IV.second) {
    // If this is a speculative "available" value, mark it as being used for
    // speculation of other blocks.
    if (IV.first->second == 2)
      IV.first->second = 3;
    return IV.first->second != 0;
  }

  // Otherwise, see if it is fully available in all predecessors.
  pred_iterator PI = pred_begin(BB), PE = pred_end(BB);

  // If this block has no predecessors, it isn't live-in here.
  if (PI == PE)
    goto SpeculationFailure;

  for (; PI != PE; ++PI)
    // If the value isn't fully available in one of our predecessors, then it
    // isn't fully available in this block either.  Undo our previous
    // optimistic assumption and bail out.
    if (!IsValueFullyAvailableInBlock(*PI, FullyAvailableBlocks))
      goto SpeculationFailure;

  return true;

// SpeculationFailure - If we get here, we found out that this is not, after
// all, a fully-available block.  We have a problem if we speculated on this and
// used the speculation to mark other blocks as available.
SpeculationFailure:
  char &BBVal = FullyAvailableBlocks[BB];

  // If we didn't speculate on this, just return with it set to false.
  if (BBVal == 2) {
    BBVal = 0;
    return false;
  }

  // If we did speculate on this value, we could have blocks set to 1 that are
  // incorrect.  Walk the (transitive) successors of this block and mark them as
  // 0 if set to one.
  SmallVector<BasicBlock*, 32> BBWorklist;
  BBWorklist.push_back(BB);

  do {
    BasicBlock *Entry = BBWorklist.pop_back_val();
    // Note that this sets blocks to 0 (unavailable) if they happen to not
    // already be in FullyAvailableBlocks.  This is safe.
    char &EntryVal = FullyAvailableBlocks[Entry];
    if (EntryVal == 0) continue;  // Already unavailable.

    // Mark as unavailable.
    EntryVal = 0;

    for (succ_iterator I = succ_begin(Entry), E = succ_end(Entry); I != E; ++I)
      BBWorklist.push_back(*I);
  } while (!BBWorklist.empty());

  return false;
}
Chris Lattnerd6b1d052009-09-20 20:09:34 +0000831
Chris Lattner012b3602009-09-21 17:24:04 +0000832/// CanCoerceMustAliasedValueToLoad - Return true if
833/// CoerceAvailableValueToLoadType will succeed.
834static bool CanCoerceMustAliasedValueToLoad(Value *StoredVal,
835 const Type *LoadTy,
836 const TargetData &TD) {
837 // If the loaded or stored value is an first class array or struct, don't try
838 // to transform them. We need to be able to bitcast to integer.
839 if (isa<StructType>(LoadTy) || isa<ArrayType>(LoadTy) ||
840 isa<StructType>(StoredVal->getType()) ||
841 isa<ArrayType>(StoredVal->getType()))
842 return false;
843
844 // The store has to be at least as big as the load.
845 if (TD.getTypeSizeInBits(StoredVal->getType()) <
846 TD.getTypeSizeInBits(LoadTy))
847 return false;
848
849 return true;
850}
851
852
Chris Lattnerd6b1d052009-09-20 20:09:34 +0000853/// CoerceAvailableValueToLoadType - If we saw a store of a value to memory, and
854/// then a load from a must-aliased pointer of a different type, try to coerce
855/// the stored value. LoadedTy is the type of the load we want to replace and
856/// InsertPt is the place to insert new instructions.
857///
858/// If we can't do it, return null.
859static Value *CoerceAvailableValueToLoadType(Value *StoredVal,
860 const Type *LoadedTy,
861 Instruction *InsertPt,
862 const TargetData &TD) {
Chris Lattner012b3602009-09-21 17:24:04 +0000863 if (!CanCoerceMustAliasedValueToLoad(StoredVal, LoadedTy, TD))
864 return 0;
865
Chris Lattnerd6b1d052009-09-20 20:09:34 +0000866 const Type *StoredValTy = StoredVal->getType();
867
868 uint64_t StoreSize = TD.getTypeSizeInBits(StoredValTy);
869 uint64_t LoadSize = TD.getTypeSizeInBits(LoadedTy);
870
871 // If the store and reload are the same size, we can always reuse it.
872 if (StoreSize == LoadSize) {
873 if (isa<PointerType>(StoredValTy) && isa<PointerType>(LoadedTy)) {
874 // Pointer to Pointer -> use bitcast.
875 return new BitCastInst(StoredVal, LoadedTy, "", InsertPt);
876 }
877
878 // Convert source pointers to integers, which can be bitcast.
879 if (isa<PointerType>(StoredValTy)) {
880 StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
881 StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
882 }
883
884 const Type *TypeToCastTo = LoadedTy;
885 if (isa<PointerType>(TypeToCastTo))
886 TypeToCastTo = TD.getIntPtrType(StoredValTy->getContext());
887
888 if (StoredValTy != TypeToCastTo)
889 StoredVal = new BitCastInst(StoredVal, TypeToCastTo, "", InsertPt);
890
891 // Cast to pointer if the load needs a pointer type.
892 if (isa<PointerType>(LoadedTy))
893 StoredVal = new IntToPtrInst(StoredVal, LoadedTy, "", InsertPt);
894
895 return StoredVal;
896 }
897
898 // If the loaded value is smaller than the available value, then we can
899 // extract out a piece from it. If the available value is too small, then we
900 // can't do anything.
Chris Lattner012b3602009-09-21 17:24:04 +0000901 assert(StoreSize >= LoadSize && "CanCoerceMustAliasedValueToLoad fail");
Chris Lattnerd6b1d052009-09-20 20:09:34 +0000902
903 // Convert source pointers to integers, which can be manipulated.
904 if (isa<PointerType>(StoredValTy)) {
905 StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
906 StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
907 }
908
909 // Convert vectors and fp to integer, which can be manipulated.
910 if (!isa<IntegerType>(StoredValTy)) {
911 StoredValTy = IntegerType::get(StoredValTy->getContext(), StoreSize);
912 StoredVal = new BitCastInst(StoredVal, StoredValTy, "", InsertPt);
913 }
914
915 // If this is a big-endian system, we need to shift the value down to the low
916 // bits so that a truncate will work.
917 if (TD.isBigEndian()) {
918 Constant *Val = ConstantInt::get(StoredVal->getType(), StoreSize-LoadSize);
919 StoredVal = BinaryOperator::CreateLShr(StoredVal, Val, "tmp", InsertPt);
920 }
921
922 // Truncate the integer to the right size now.
923 const Type *NewIntTy = IntegerType::get(StoredValTy->getContext(), LoadSize);
924 StoredVal = new TruncInst(StoredVal, NewIntTy, "trunc", InsertPt);
925
926 if (LoadedTy == NewIntTy)
927 return StoredVal;
928
929 // If the result is a pointer, inttoptr.
930 if (isa<PointerType>(LoadedTy))
931 return new IntToPtrInst(StoredVal, LoadedTy, "inttoptr", InsertPt);
932
933 // Otherwise, bitcast.
934 return new BitCastInst(StoredVal, LoadedTy, "bitcast", InsertPt);
935}
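
// Illustrative example: if an i8* value was stored and an i32 is loaded from
// the same must-aliased address on a 32-bit target, the stored pointer is
// reused via a single ptrtoint; in the mismatched-size case the value is
// instead shifted down (big-endian only) and truncated as above.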

/// GetBaseWithConstantOffset - Analyze the specified pointer to see if it can
/// be expressed as a base pointer plus a constant offset.  Return the base and
/// offset to the caller.
static Value *GetBaseWithConstantOffset(Value *Ptr, int64_t &Offset,
                                        const TargetData &TD) {
  Operator *PtrOp = dyn_cast<Operator>(Ptr);
  if (PtrOp == 0) return Ptr;

  // Just look through bitcasts.
  if (PtrOp->getOpcode() == Instruction::BitCast)
    return GetBaseWithConstantOffset(PtrOp->getOperand(0), Offset, TD);

  // If this is a GEP with constant indices, we can look through it.
  GEPOperator *GEP = dyn_cast<GEPOperator>(PtrOp);
  if (GEP == 0 || !GEP->hasAllConstantIndices()) return Ptr;

  gep_type_iterator GTI = gep_type_begin(GEP);
  for (User::op_iterator I = GEP->idx_begin(), E = GEP->idx_end(); I != E;
       ++I, ++GTI) {
    ConstantInt *OpC = cast<ConstantInt>(*I);
    if (OpC->isZero()) continue;

    // Handle a struct and array indices which add their offset to the pointer.
    if (const StructType *STy = dyn_cast<StructType>(*GTI)) {
      Offset += TD.getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
    } else {
      uint64_t Size = TD.getTypeAllocSize(GTI.getIndexedType());
      Offset += OpC->getSExtValue()*Size;
    }
  }

  // Re-sign extend from the pointer size if needed to get overflow edge cases
  // right.
  unsigned PtrSize = TD.getPointerSizeInBits();
  if (PtrSize < 64)
    Offset = (Offset << (64-PtrSize)) >> (64-PtrSize);

  return GetBaseWithConstantOffset(GEP->getPointerOperand(), Offset, TD);
}
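
// For example (illustrative): for a pointer computed as
//   %p = getelementptr {i32, i32}* %base, i32 0, i32 1
// this returns %base with Offset += 4 (the second field's offset in the
// struct layout, assuming 4-byte i32 with no padding); a non-constant index
// stops the walk and the GEP itself is returned.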

/// AnalyzeLoadFromClobberingWrite - This function is called when we have a
/// memdep query of a load that ends up being a clobbering memory write (store,
/// memset, memcpy, memmove).  This means that the write *may* provide bits used
/// by the load but we can't be sure because the pointers don't mustalias.
///
/// Check this case to see if there is anything more we can do before we give
/// up.  This returns -1 if we have to give up, or a byte number in the stored
/// value of the piece that feeds the load.
static int AnalyzeLoadFromClobberingWrite(const Type *LoadTy, Value *LoadPtr,
                                          Value *WritePtr,
                                          uint64_t WriteSizeInBits,
                                          const TargetData &TD) {
  // If the loaded or stored value is a first-class array or struct, don't try
  // to transform them.  We need to be able to bitcast to integer.
  if (isa<StructType>(LoadTy) || isa<ArrayType>(LoadTy))
    return -1;

  int64_t StoreOffset = 0, LoadOffset = 0;
  Value *StoreBase = GetBaseWithConstantOffset(WritePtr, StoreOffset, TD);
  Value *LoadBase =
    GetBaseWithConstantOffset(LoadPtr, LoadOffset, TD);
  if (StoreBase != LoadBase)
    return -1;

  // If the load and store are to the exact same address, they should have been
  // a must alias.  AA must have gotten confused.
  // FIXME: Study to see if/when this happens.
  if (LoadOffset == StoreOffset) {
#if 0
    dbgs() << "STORE/LOAD DEP WITH COMMON POINTER MISSED:\n"
           << "Base = " << *StoreBase << "\n"
           << "Store Ptr = " << *WritePtr << "\n"
           << "Store Offs = " << StoreOffset << "\n"
           << "Load Ptr = " << *LoadPtr << "\n";
    abort();
#endif
    return -1;
  }

  // If the load and store don't overlap at all, the store doesn't provide
  // anything to the load.  In this case, they really don't alias at all, AA
  // must have gotten confused.
  // FIXME: Investigate cases where this bails out, e.g. rdar://7238614. Then
  // remove this check, as it is duplicated with what we have below.
  uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy);

  if ((WriteSizeInBits & 7) | (LoadSize & 7))
    return -1;
  uint64_t StoreSize = WriteSizeInBits >> 3;  // Convert to bytes.
  LoadSize >>= 3;


  bool isAAFailure = false;
  if (StoreOffset < LoadOffset) {
    isAAFailure = StoreOffset+int64_t(StoreSize) <= LoadOffset;
  } else {
    isAAFailure = LoadOffset+int64_t(LoadSize) <= StoreOffset;
  }
  if (isAAFailure) {
#if 0
    dbgs() << "STORE LOAD DEP WITH COMMON BASE:\n"
           << "Base = " << *StoreBase << "\n"
           << "Store Ptr = " << *WritePtr << "\n"
           << "Store Offs = " << StoreOffset << "\n"
           << "Load Ptr = " << *LoadPtr << "\n";
    abort();
#endif
    return -1;
  }

  // If the Load isn't completely contained within the stored bits, we don't
  // have all the bits to feed it.  We could do something crazy in the future
  // (issue a smaller load then merge the bits in) but this seems unlikely to be
  // valuable.
  if (StoreOffset > LoadOffset ||
      StoreOffset+StoreSize < LoadOffset+LoadSize)
    return -1;

  // Okay, we can do this transformation.  Return the number of bytes into the
  // store that the load is.
  return LoadOffset-StoreOffset;
}
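
// Illustrative example: a store of an i64 at byte offset 16 from some base
// clobbers an i32 load at offset 20 from the same base.  StoreSize is 8
// bytes, LoadSize is 4, the load lies entirely inside the stored bytes
// [16,24), and the function returns 20-16 = 4: the load reads bytes 4..7 of
// the stored value.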

/// AnalyzeLoadFromClobberingStore - This function is called when we have a
/// memdep query of a load that ends up being a clobbering store.
static int AnalyzeLoadFromClobberingStore(const Type *LoadTy, Value *LoadPtr,
                                          StoreInst *DepSI,
                                          const TargetData &TD) {
  // Cannot handle reading from store of first-class aggregate yet.
  if (isa<StructType>(DepSI->getOperand(0)->getType()) ||
      isa<ArrayType>(DepSI->getOperand(0)->getType()))
    return -1;

  Value *StorePtr = DepSI->getPointerOperand();
  uint64_t StoreSize = TD.getTypeSizeInBits(DepSI->getOperand(0)->getType());
  return AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr,
                                        StorePtr, StoreSize, TD);
}

static int AnalyzeLoadFromClobberingMemInst(const Type *LoadTy, Value *LoadPtr,
                                            MemIntrinsic *MI,
                                            const TargetData &TD) {
  // If the mem operation is a non-constant size, we can't handle it.
  ConstantInt *SizeCst = dyn_cast<ConstantInt>(MI->getLength());
  if (SizeCst == 0) return -1;
  uint64_t MemSizeInBits = SizeCst->getZExtValue()*8;

  // If this is memset, we just need to see if the offset is valid in the size
  // of the memset.
  if (MI->getIntrinsicID() == Intrinsic::memset)
    return AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr, MI->getDest(),
                                          MemSizeInBits, TD);

  // If we have a memcpy/memmove, the only case we can handle is if this is a
  // copy from constant memory.  In that case, we can read directly from the
  // constant memory.
  MemTransferInst *MTI = cast<MemTransferInst>(MI);

  Constant *Src = dyn_cast<Constant>(MTI->getSource());
  if (Src == 0) return -1;

  GlobalVariable *GV = dyn_cast<GlobalVariable>(Src->getUnderlyingObject());
  if (GV == 0 || !GV->isConstant()) return -1;

  // See if the access is within the bounds of the transfer.
  int Offset = AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr,
                                              MI->getDest(), MemSizeInBits, TD);
  if (Offset == -1)
    return Offset;

  // Otherwise, see if we can constant fold a load from the constant with the
  // offset applied as appropriate.
  Src = ConstantExpr::getBitCast(Src,
                                 llvm::Type::getInt8PtrTy(Src->getContext()));
  Constant *OffsetCst =
    ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
  Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
  Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(LoadTy));
  if (ConstantFoldLoadFromConstPtr(Src, &TD))
    return Offset;
  return -1;
}
Chris Lattner8f912082009-09-21 06:24:16 +00001121
1122/// GetStoreValueForLoad - This function is called when we have a
1123/// memdep query of a load that ends up being a clobbering store. This means
1124/// that the store *may* provide bits used by the load but we can't be sure
1125/// because the pointers don't mustalias. Check this case to see if there is
1126/// anything more we can do before we give up.
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001127static Value *GetStoreValueForLoad(Value *SrcVal, unsigned Offset,
1128 const Type *LoadTy,
1129 Instruction *InsertPt, const TargetData &TD){
Chris Lattner8f912082009-09-21 06:24:16 +00001130 LLVMContext &Ctx = SrcVal->getType()->getContext();
1131
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001132 uint64_t StoreSize = TD.getTypeSizeInBits(SrcVal->getType())/8;
1133 uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy)/8;
Chris Lattner8f912082009-09-21 06:24:16 +00001134
Chris Lattner2737cb42009-12-09 18:13:28 +00001135 IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
Chris Lattner8f912082009-09-21 06:24:16 +00001136
1137 // Compute which bits of the stored value are being used by the load. Convert
1138 // to an integer type to start with.
1139 if (isa<PointerType>(SrcVal->getType()))
Chris Lattner2737cb42009-12-09 18:13:28 +00001140 SrcVal = Builder.CreatePtrToInt(SrcVal, TD.getIntPtrType(Ctx), "tmp");
Chris Lattner8f912082009-09-21 06:24:16 +00001141 if (!isa<IntegerType>(SrcVal->getType()))
Chris Lattner2737cb42009-12-09 18:13:28 +00001142 SrcVal = Builder.CreateBitCast(SrcVal, IntegerType::get(Ctx, StoreSize*8),
1143 "tmp");
Chris Lattner8f912082009-09-21 06:24:16 +00001144
1145 // Shift the bits to the least significant depending on endianness.
1146 unsigned ShiftAmt;
Chris Lattnercb00f732009-12-06 01:57:02 +00001147 if (TD.isLittleEndian())
Chris Lattner8f912082009-09-21 06:24:16 +00001148 ShiftAmt = Offset*8;
Chris Lattnercb00f732009-12-06 01:57:02 +00001149 else
Chris Lattner1846fa02009-09-21 17:55:47 +00001150 ShiftAmt = (StoreSize-LoadSize-Offset)*8;
Chris Lattner8f912082009-09-21 06:24:16 +00001151
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001152 if (ShiftAmt)
Chris Lattner2737cb42009-12-09 18:13:28 +00001153 SrcVal = Builder.CreateLShr(SrcVal, ShiftAmt, "tmp");
Chris Lattner8f912082009-09-21 06:24:16 +00001154
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001155 if (LoadSize != StoreSize)
Chris Lattner2737cb42009-12-09 18:13:28 +00001156 SrcVal = Builder.CreateTrunc(SrcVal, IntegerType::get(Ctx, LoadSize*8),
1157 "tmp");
Chris Lattner8f912082009-09-21 06:24:16 +00001158
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001159 return CoerceAvailableValueToLoadType(SrcVal, LoadTy, InsertPt, TD);
Chris Lattner8f912082009-09-21 06:24:16 +00001160}
1161
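// To make the shift arithmetic above concrete, here is a standalone sketch of
// the same extraction on a plain integer. It assumes the stored value fits in
// a uint64_t (StoreSize <= 8) and that Offset + LoadSize <= StoreSize; the
// name is illustrative only and is not used by this pass:
static uint64_t ExampleExtractLoadedBytes(uint64_t StoredBits,
                                          unsigned StoreSize, unsigned LoadSize,
                                          unsigned Offset, bool LittleEndian) {
  unsigned ShiftAmt = LittleEndian ? Offset*8
                                   : (StoreSize-LoadSize-Offset)*8;
  uint64_t Val = StoredBits >> ShiftAmt;
  if (LoadSize < 8)
    Val &= (1ULL << (LoadSize*8)) - 1;    // Keep only the low LoadSize bytes.
  return Val;
}
// For example, loading the byte at offset 2 out of a 4-byte store of
// 0x11223344 yields 0x22 on a little-endian target and 0x33 on a big-endian
// one.
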
Chris Lattnercb00f732009-12-06 01:57:02 +00001162/// GetMemInstValueForLoad - This function is called when we have a
1163/// memdep query of a load that ends up being a clobbering mem intrinsic.
1164static Value *GetMemInstValueForLoad(MemIntrinsic *SrcInst, unsigned Offset,
1165 const Type *LoadTy, Instruction *InsertPt,
1166 const TargetData &TD){
1167 LLVMContext &Ctx = LoadTy->getContext();
1168 uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy)/8;
1169
1170 IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
1171
1172 // We know that this method is only called when the mem transfer fully
1173 // provides the bits for the load.
1174 if (MemSetInst *MSI = dyn_cast<MemSetInst>(SrcInst)) {
1175 // memset(P, 'x', 1234) -> splat('x'), even if x is a variable, and
1176 // regardless of what the offset is.
1177 Value *Val = MSI->getValue();
1178 if (LoadSize != 1)
1179 Val = Builder.CreateZExt(Val, IntegerType::get(Ctx, LoadSize*8));
1180
1181 Value *OneElt = Val;
1182
1183 // Splat the value out to the right number of bits.
1184 for (unsigned NumBytesSet = 1; NumBytesSet != LoadSize; ) {
1185 // If we can double the number of bytes set, do it.
1186 if (NumBytesSet*2 <= LoadSize) {
1187 Value *ShVal = Builder.CreateShl(Val, NumBytesSet*8);
1188 Val = Builder.CreateOr(Val, ShVal);
1189 NumBytesSet <<= 1;
1190 continue;
1191 }
1192
1193 // Otherwise insert one byte at a time.
1194 Value *ShVal = Builder.CreateShl(Val, 1*8);
1195 Val = Builder.CreateOr(OneElt, ShVal);
1196 ++NumBytesSet;
1197 }
1198
1199 return CoerceAvailableValueToLoadType(Val, LoadTy, InsertPt, TD);
1200 }
Chris Lattner4bb632f2009-12-06 05:29:56 +00001201
1202 // Otherwise, this is a memcpy/memmove from a constant global.
1203 MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);
1204 Constant *Src = cast<Constant>(MTI->getSource());
1205
1206 // See if we can constant fold a load from the constant with the
1207 // offset applied as appropriate.
1208 Src = ConstantExpr::getBitCast(Src,
1209 llvm::Type::getInt8PtrTy(Src->getContext()));
1210 Constant *OffsetCst =
1211 ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
1212 Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
1213 Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(LoadTy));
1214 return ConstantFoldLoadFromConstPtr(Src, &TD);
Chris Lattnercb00f732009-12-06 01:57:02 +00001215}
1216
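// A standalone sketch of the splat-by-doubling loop above, operating on a
// plain integer rather than IR. It assumes 1 <= LoadSize <= 8 so the result
// fits in a uint64_t; the name is illustrative only and is not used by this
// pass:
static uint64_t ExampleSplatByte(uint8_t Byte, unsigned LoadSize) {
  uint64_t OneElt = Byte, Val = Byte;
  for (unsigned NumBytesSet = 1; NumBytesSet != LoadSize; ) {
    if (NumBytesSet*2 <= LoadSize) {      // Double the number of bytes set.
      Val |= Val << (NumBytesSet*8);
      NumBytesSet <<= 1;
      continue;
    }
    Val = OneElt | (Val << 8);            // Otherwise append one byte at a time.
    ++NumBytesSet;
  }
  return Val;                             // e.g. ExampleSplatByte(0xAB, 3) == 0xABABAB
}
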
1217
1218
Chris Lattner19b84b32009-09-21 06:30:24 +00001219struct AvailableValueInBlock {
1220 /// BB - The basic block in question.
1221 BasicBlock *BB;
Chris Lattnera96e53a2009-12-06 04:54:31 +00001222 enum ValType {
1223 SimpleVal, // A simple offsetted value that is accessed.
1224 MemIntrin // A memory intrinsic which is loaded from.
1225 };
1226
Chris Lattner19b84b32009-09-21 06:30:24 +00001227 /// V - The value that is live out of the block.
Chris Lattnera96e53a2009-12-06 04:54:31 +00001228 PointerIntPair<Value *, 1, ValType> Val;
1229
1230 /// Offset - The byte offset in Val that is interesting for the load query.
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001231 unsigned Offset;
Chris Lattner19b84b32009-09-21 06:30:24 +00001232
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001233 static AvailableValueInBlock get(BasicBlock *BB, Value *V,
1234 unsigned Offset = 0) {
Chris Lattner19b84b32009-09-21 06:30:24 +00001235 AvailableValueInBlock Res;
1236 Res.BB = BB;
Chris Lattnera96e53a2009-12-06 04:54:31 +00001237 Res.Val.setPointer(V);
1238 Res.Val.setInt(SimpleVal);
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001239 Res.Offset = Offset;
Chris Lattner19b84b32009-09-21 06:30:24 +00001240 return Res;
1241 }
Chris Lattnera96e53a2009-12-06 04:54:31 +00001242
1243 static AvailableValueInBlock getMI(BasicBlock *BB, MemIntrinsic *MI,
1244 unsigned Offset = 0) {
1245 AvailableValueInBlock Res;
1246 Res.BB = BB;
1247 Res.Val.setPointer(MI);
1248 Res.Val.setInt(MemIntrin);
1249 Res.Offset = Offset;
1250 return Res;
1251 }
1252
1253 bool isSimpleValue() const { return Val.getInt() == SimpleVal; }
1254 Value *getSimpleValue() const {
1255 assert(isSimpleValue() && "Wrong accessor");
1256 return Val.getPointer();
1257 }
1258
1259 MemIntrinsic *getMemIntrinValue() const {
1260 assert(!isSimpleValue() && "Wrong accessor");
1261 return cast<MemIntrinsic>(Val.getPointer());
1262 }
Chris Lattner0a555da2009-12-21 23:04:33 +00001263
1264 /// MaterializeAdjustedValue - Emit code into this block to adjust the value
1265 /// defined here to the specified type. This handles various coercion cases.
1266 Value *MaterializeAdjustedValue(const Type *LoadTy,
1267 const TargetData *TD) const {
1268 Value *Res;
1269 if (isSimpleValue()) {
1270 Res = getSimpleValue();
1271 if (Res->getType() != LoadTy) {
1272 assert(TD && "Need target data to handle type mismatch case");
1273 Res = GetStoreValueForLoad(Res, Offset, LoadTy, BB->getTerminator(),
1274 *TD);
1275
1276 DEBUG(errs() << "GVN COERCED NONLOCAL VAL:\nOffset: " << Offset << " "
1277 << *getSimpleValue() << '\n'
1278 << *Res << '\n' << "\n\n\n");
1279 }
1280 } else {
1281 Res = GetMemInstValueForLoad(getMemIntrinValue(), Offset,
1282 LoadTy, BB->getTerminator(), *TD);
1283 DEBUG(errs() << "GVN COERCED NONLOCAL MEM INTRIN:\nOffset: " << Offset
1284 << " " << *getMemIntrinValue() << '\n'
1285 << *Res << '\n' << "\n\n\n");
1286 }
1287 return Res;
1288 }
Chris Lattner19b84b32009-09-21 06:30:24 +00001289};
1290
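// The PointerIntPair above stores the SimpleVal/MemIntrin tag in a spare low
// bit of the Value pointer, which works because Value objects are more than
// byte-aligned. A bare-bones sketch of the same trick in plain C++, assuming
// the pointee type is aligned to at least 2 bytes (the struct is illustrative
// only and is not used by this pass):
struct ExampleTaggedPointer {
  uintptr_t Bits;
  void set(void *P, bool Tag) {
    Bits = reinterpret_cast<uintptr_t>(P) | uintptr_t(Tag);
  }
  void *getPointer() const {
    return reinterpret_cast<void*>(Bits & ~uintptr_t(1));   // Strip the tag bit.
  }
  bool getTag() const { return (Bits & 1) != 0; }
};
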
Chris Lattner6e5ea272009-10-10 23:50:30 +00001291/// ConstructSSAForLoadSet - Given the set of available values in ValuesPerBlock,
1292/// construct SSA form, allowing us to eliminate LI. This returns the value
1293/// that should be used at LI's definition site.
1294static Value *ConstructSSAForLoadSet(LoadInst *LI,
1295 SmallVectorImpl<AvailableValueInBlock> &ValuesPerBlock,
1296 const TargetData *TD,
Chris Lattner1c8dc432009-12-21 23:15:48 +00001297 const DominatorTree &DT,
Chris Lattner6e5ea272009-10-10 23:50:30 +00001298 AliasAnalysis *AA) {
Chris Lattner1c8dc432009-12-21 23:15:48 +00001299 // Check for the fully redundant, dominating load case. In this case, we can
1300 // just use the dominating value directly.
1301 if (ValuesPerBlock.size() == 1 &&
1302 DT.properlyDominates(ValuesPerBlock[0].BB, LI->getParent()))
1303 return ValuesPerBlock[0].MaterializeAdjustedValue(LI->getType(), TD);
1304
1305 // Otherwise, we have to construct SSA form.
Chris Lattner6e5ea272009-10-10 23:50:30 +00001306 SmallVector<PHINode*, 8> NewPHIs;
1307 SSAUpdater SSAUpdate(&NewPHIs);
1308 SSAUpdate.Initialize(LI);
1309
1310 const Type *LoadTy = LI->getType();
1311
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001312 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
Chris Lattnera96e53a2009-12-06 04:54:31 +00001313 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1314 BasicBlock *BB = AV.BB;
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001315
Chris Lattner6e5ea272009-10-10 23:50:30 +00001316 if (SSAUpdate.HasValueForBlock(BB))
1317 continue;
Chris Lattnera96e53a2009-12-06 04:54:31 +00001318
Chris Lattner0a555da2009-12-21 23:04:33 +00001319 SSAUpdate.AddAvailableValue(BB, AV.MaterializeAdjustedValue(LoadTy, TD));
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001320 }
Chris Lattner6e5ea272009-10-10 23:50:30 +00001321
1322 // Perform PHI construction.
1323 Value *V = SSAUpdate.GetValueInMiddleOfBlock(LI->getParent());
1324
1325 // If new PHI nodes were created, notify alias analysis.
1326 if (isa<PointerType>(V->getType()))
1327 for (unsigned i = 0, e = NewPHIs.size(); i != e; ++i)
1328 AA->copyValue(LI, NewPHIs[i]);
1329
1330 return V;
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001331}
1332
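// In IR terms, for a simple diamond where the loaded value is available as %a
// out of one predecessor and as %b out of the other, the SSAUpdater above
// effectively materializes something like
//   %merged = phi i32 [ %a, %pred1 ], [ %b, %pred2 ]
// at the start of LI's block and returns it, so LI can be replaced by the phi.
// (Illustrative IR only; block and value names are made up.)
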
Owen Andersonf187daf2009-12-02 07:35:19 +00001333static bool isLifetimeStart(Instruction *Inst) {
Chris Lattnerbc6fccc2009-12-02 06:44:58 +00001334 if (IntrinsicInst* II = dyn_cast<IntrinsicInst>(Inst))
Owen Andersonf187daf2009-12-02 07:35:19 +00001335 return II->getIntrinsicID() == Intrinsic::lifetime_start;
Chris Lattnerbc6fccc2009-12-02 06:44:58 +00001336 return false;
1337}
1338
Owen Andersone0143452007-08-16 22:02:55 +00001339/// processNonLocalLoad - Attempt to eliminate a load whose dependencies are
1340/// non-local by performing PHI construction.
Chris Lattnerdcded152008-12-02 08:16:11 +00001341bool GVN::processNonLocalLoad(LoadInst *LI,
Chris Lattner7de20452008-03-21 22:01:16 +00001342 SmallVectorImpl<Instruction*> &toErase) {
Chris Lattnerdcded152008-12-02 08:16:11 +00001343 // Find the non-local dependencies of the load.
Chris Lattnerbf2b45e2009-12-22 04:25:02 +00001344 SmallVector<NonLocalDepResult, 64> Deps;
Chris Lattneraf713862008-12-09 19:25:07 +00001345 MD->getNonLocalPointerDependency(LI->getOperand(0), true, LI->getParent(),
1346 Deps);
David Greeneac297082010-01-05 01:27:17 +00001347 //DEBUG(dbgs() << "INVESTIGATING NONLOCAL LOAD: "
Dan Gohman7e124382009-07-31 20:24:18 +00001348 // << Deps.size() << *LI << '\n');
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001349
Owen Anderson90e717d2008-08-26 22:07:42 +00001350 // If we had to process more than one hundred blocks to find the
1351 // dependencies, this load isn't worth worrying about. Optimizing
1352 // it will be too expensive.
Chris Lattneraf713862008-12-09 19:25:07 +00001353 if (Deps.size() > 100)
Owen Anderson90e717d2008-08-26 22:07:42 +00001354 return false;
Chris Lattner8d1686f2008-12-18 00:51:32 +00001355
1356 // If we had a phi translation failure, we'll have a single entry which is a
1357 // clobber in the current block. Reject this early.
Chris Lattner1a957962009-12-09 07:08:01 +00001358 if (Deps.size() == 1 && Deps[0].getResult().isClobber()) {
Edwin Török3ffffac2009-06-17 18:48:18 +00001359 DEBUG(
David Greeneac297082010-01-05 01:27:17 +00001360 dbgs() << "GVN: non-local load ";
1361 WriteAsOperand(dbgs(), LI);
1362 dbgs() << " is clobbered by " << *Deps[0].getResult().getInst() << '\n';
Edwin Török3ffffac2009-06-17 18:48:18 +00001363 );
Chris Lattner8d1686f2008-12-18 00:51:32 +00001364 return false;
Edwin Török3ffffac2009-06-17 18:48:18 +00001365 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001366
Chris Lattnerdcded152008-12-02 08:16:11 +00001367 // Filter out useless results (non-locals, etc). Keep track of the blocks
1368 // where we have a value available (in ValuesPerBlock), and also whether we see
1369 // dependencies that produce an unknown value for the load (such as a call
1370 // that could potentially clobber the load).
Chris Lattner19b84b32009-09-21 06:30:24 +00001371 SmallVector<AvailableValueInBlock, 16> ValuesPerBlock;
Chris Lattnerdcded152008-12-02 08:16:11 +00001372 SmallVector<BasicBlock*, 16> UnavailableBlocks;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001373
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001374 const TargetData *TD = 0;
1375
Chris Lattneraf713862008-12-09 19:25:07 +00001376 for (unsigned i = 0, e = Deps.size(); i != e; ++i) {
Chris Lattner1a957962009-12-09 07:08:01 +00001377 BasicBlock *DepBB = Deps[i].getBB();
1378 MemDepResult DepInfo = Deps[i].getResult();
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001379
Chris Lattner4531da82008-12-05 21:04:20 +00001380 if (DepInfo.isClobber()) {
Chris Lattner091a1d22009-12-09 18:21:46 +00001381 // The address being loaded in this non-local block may not be the same as
1382 // the pointer operand of the load if PHI translation occurs. Make sure
1383 // to consider the right address.
1384 Value *Address = Deps[i].getAddress();
1385
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001386 // If the dependence is to a store that writes to a superset of the bits
1387 // read by the load, we can extract the bits we need for the load from the
1388 // stored value.
1389 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInfo.getInst())) {
1390 if (TD == 0)
1391 TD = getAnalysisIfAvailable<TargetData>();
Chris Lattner091a1d22009-12-09 18:21:46 +00001392 if (TD && Address) {
1393 int Offset = AnalyzeLoadFromClobberingStore(LI->getType(), Address,
Chris Lattnerfd9feaa2009-12-09 07:37:07 +00001394 DepSI, *TD);
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001395 if (Offset != -1) {
1396 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1397 DepSI->getOperand(0),
1398 Offset));
1399 continue;
1400 }
1401 }
1402 }
Chris Lattnercb00f732009-12-06 01:57:02 +00001403
Chris Lattnercb00f732009-12-06 01:57:02 +00001404 // If the clobbering value is a memset/memcpy/memmove, see if we can
1405 // forward a value on from it.
Chris Lattnera96e53a2009-12-06 04:54:31 +00001406 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(DepInfo.getInst())) {
Chris Lattnercb00f732009-12-06 01:57:02 +00001407 if (TD == 0)
1408 TD = getAnalysisIfAvailable<TargetData>();
Chris Lattner091a1d22009-12-09 18:21:46 +00001409 if (TD && Address) {
1410 int Offset = AnalyzeLoadFromClobberingMemInst(LI->getType(), Address,
Chris Lattnerfd9feaa2009-12-09 07:37:07 +00001411 DepMI, *TD);
Chris Lattnera96e53a2009-12-06 04:54:31 +00001412 if (Offset != -1) {
1413 ValuesPerBlock.push_back(AvailableValueInBlock::getMI(DepBB, DepMI,
1414 Offset));
1415 continue;
1416 }
Chris Lattnercb00f732009-12-06 01:57:02 +00001417 }
1418 }
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001419
Chris Lattner4531da82008-12-05 21:04:20 +00001420 UnavailableBlocks.push_back(DepBB);
1421 continue;
1422 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001423
Chris Lattner4531da82008-12-05 21:04:20 +00001424 Instruction *DepInst = DepInfo.getInst();
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001425
Chris Lattner4531da82008-12-05 21:04:20 +00001426 // Loading the allocation -> undef.
Chris Lattnerbc6fccc2009-12-02 06:44:58 +00001427 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst) ||
Owen Andersonf187daf2009-12-02 07:35:19 +00001428 // Loading immediately after lifetime begin -> undef.
1429 isLifetimeStart(DepInst)) {
Chris Lattner19b84b32009-09-21 06:30:24 +00001430 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1431 UndefValue::get(LI->getType())));
Chris Lattner46876282008-12-01 01:15:42 +00001432 continue;
1433 }
Owen Andersonc07861a2009-10-28 07:05:35 +00001434
Chris Lattner19b84b32009-09-21 06:30:24 +00001435 if (StoreInst *S = dyn_cast<StoreInst>(DepInst)) {
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001436 // Reject loads and stores that are to the same address but are of
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001437 // different types if we have to.
Chris Lattnerdcded152008-12-02 08:16:11 +00001438 if (S->getOperand(0)->getType() != LI->getType()) {
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001439 if (TD == 0)
1440 TD = getAnalysisIfAvailable<TargetData>();
1441
1442 // If the stored value is larger than or equal to the loaded value, we can
1443 // reuse it.
Chris Lattner012b3602009-09-21 17:24:04 +00001444 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(S->getOperand(0),
1445 LI->getType(), *TD)) {
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001446 UnavailableBlocks.push_back(DepBB);
1447 continue;
1448 }
Chris Lattnerdcded152008-12-02 08:16:11 +00001449 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001450
Chris Lattner19b84b32009-09-21 06:30:24 +00001451 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1452 S->getOperand(0)));
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001453 continue;
1454 }
1455
1456 if (LoadInst *LD = dyn_cast<LoadInst>(DepInst)) {
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001457 // If the types mismatch and we can't handle it, reject reuse of the load.
Chris Lattnerdcded152008-12-02 08:16:11 +00001458 if (LD->getType() != LI->getType()) {
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001459 if (TD == 0)
1460 TD = getAnalysisIfAvailable<TargetData>();
1461
1462 // If the previously loaded value is larger than or equal to the value
1463 // being loaded here, we can reuse it.
Chris Lattner012b3602009-09-21 17:24:04 +00001464 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(LD, LI->getType(),*TD)){
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001465 UnavailableBlocks.push_back(DepBB);
1466 continue;
1467 }
Chris Lattnerdcded152008-12-02 08:16:11 +00001468 }
Chris Lattner19b84b32009-09-21 06:30:24 +00001469 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB, LD));
Chris Lattnerdcded152008-12-02 08:16:11 +00001470 continue;
Owen Anderson5d72a422007-07-25 19:57:03 +00001471 }
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001472
1473 UnavailableBlocks.push_back(DepBB);
1474 continue;
Chris Lattner3d7103e2008-03-21 21:14:38 +00001475 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001476
Chris Lattnerdcded152008-12-02 08:16:11 +00001477 // If we have no predecessors that produce a known value for this load, exit
1478 // early.
1479 if (ValuesPerBlock.empty()) return false;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001480
Chris Lattnerdcded152008-12-02 08:16:11 +00001481 // If all of the instructions we depend on produce a known value for this
1482 // load, then it is fully redundant and we can use PHI insertion to compute
1483 // its value. Insert PHIs and remove the fully redundant value now.
1484 if (UnavailableBlocks.empty()) {
David Greeneac297082010-01-05 01:27:17 +00001485 DEBUG(dbgs() << "GVN REMOVING NONLOCAL LOAD: " << *LI << '\n');
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001486
Chris Lattnerdcded152008-12-02 08:16:11 +00001487 // Perform PHI construction.
Chris Lattner1c8dc432009-12-21 23:15:48 +00001488 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD, *DT,
Chris Lattner6e5ea272009-10-10 23:50:30 +00001489 VN.getAliasAnalysis());
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001490 LI->replaceAllUsesWith(V);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001491
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001492 if (isa<PHINode>(V))
1493 V->takeName(LI);
1494 if (isa<PointerType>(V->getType()))
1495 MD->invalidateCachedPointerInfo(V);
Chris Lattnerdcded152008-12-02 08:16:11 +00001496 toErase.push_back(LI);
1497 NumGVNLoad++;
1498 return true;
1499 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001500
Chris Lattnerdcded152008-12-02 08:16:11 +00001501 if (!EnablePRE || !EnableLoadPRE)
1502 return false;
1503
1504 // Okay, we have *some* definitions of the value. This means that the value
1505 // is available in some of our (transitive) predecessors. Let's think about
1506 // doing PRE of this load. This will involve inserting a new load into the
1507 // predecessor when it's not available. We could do this in general, but
1508 // prefer to not increase code size. As such, we only do this when we know
1509 // that we only have to insert *one* load (which means we're basically moving
1510 // the load, not inserting a new one).
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001511
Owen Andersondd37b182009-05-31 09:03:40 +00001512 SmallPtrSet<BasicBlock *, 4> Blockers;
1513 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1514 Blockers.insert(UnavailableBlocks[i]);
1515
1516 // Let's find the first basic block with more than one predecessor. Walk
1517 // backwards through predecessors if needed.
Chris Lattnerdcded152008-12-02 08:16:11 +00001518 BasicBlock *LoadBB = LI->getParent();
Owen Andersondd37b182009-05-31 09:03:40 +00001519 BasicBlock *TmpBB = LoadBB;
1520
1521 bool isSinglePred = false;
Dale Johannesena19b67f2009-06-17 20:48:23 +00001522 bool allSingleSucc = true;
Owen Andersondd37b182009-05-31 09:03:40 +00001523 while (TmpBB->getSinglePredecessor()) {
1524 isSinglePred = true;
1525 TmpBB = TmpBB->getSinglePredecessor();
Owen Andersondd37b182009-05-31 09:03:40 +00001526 if (TmpBB == LoadBB) // Infinite (unreachable) loop.
1527 return false;
1528 if (Blockers.count(TmpBB))
1529 return false;
Dale Johannesena19b67f2009-06-17 20:48:23 +00001530 if (TmpBB->getTerminator()->getNumSuccessors() != 1)
1531 allSingleSucc = false;
Owen Andersondd37b182009-05-31 09:03:40 +00001532 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001533
Owen Andersondd37b182009-05-31 09:03:40 +00001534 assert(TmpBB);
1535 LoadBB = TmpBB;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001536
Chris Lattnerdcded152008-12-02 08:16:11 +00001537 // If the set of available values contains LI itself, this means we have a loop where
1538 // at least one of the values is LI. Since this means that we won't be able
1539 // to eliminate LI even if we insert uses in the other predecessors, we will
1540 // end up increasing code size. Reject this by scanning for LI.
Bob Wilsonaeeef532010-02-01 21:17:14 +00001541 if (!EnableFullLoadPRE) {
1542 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i)
1543 if (ValuesPerBlock[i].isSimpleValue() &&
1544 ValuesPerBlock[i].getSimpleValue() == LI)
1545 return false;
1546 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001547
Chris Lattnera96e53a2009-12-06 04:54:31 +00001548 // FIXME: It is extremely unclear what this loop is doing, other than
1549 // artificially restricting loadpre.
Owen Andersondd37b182009-05-31 09:03:40 +00001550 if (isSinglePred) {
1551 bool isHot = false;
Chris Lattnera96e53a2009-12-06 04:54:31 +00001552 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
1553 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1554 if (AV.isSimpleValue())
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001555 // A "hot" instruction is one in some loop (because it dominates its dep.
1556 // instruction).
Chris Lattnera96e53a2009-12-06 04:54:31 +00001557 if (Instruction *I = dyn_cast<Instruction>(AV.getSimpleValue()))
1558 if (DT->dominates(LI, I)) {
1559 isHot = true;
1560 break;
1561 }
1562 }
Owen Andersondd37b182009-05-31 09:03:40 +00001563
1564 // We are interested only in "hot" instructions. We don't want to do any
1565 // mis-optimizations here.
1566 if (!isHot)
1567 return false;
1568 }
1569
Bob Wilsonaeeef532010-02-01 21:17:14 +00001570 // Check to see how many predecessors have the loaded value fully
1571 // available.
1572 DenseMap<BasicBlock*, Value*> PredLoads;
Chris Lattner159b98f2008-12-05 07:49:08 +00001573 DenseMap<BasicBlock*, char> FullyAvailableBlocks;
Chris Lattnerdcded152008-12-02 08:16:11 +00001574 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i)
Chris Lattner19b84b32009-09-21 06:30:24 +00001575 FullyAvailableBlocks[ValuesPerBlock[i].BB] = true;
Chris Lattnerdcded152008-12-02 08:16:11 +00001576 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1577 FullyAvailableBlocks[UnavailableBlocks[i]] = false;
1578
1579 for (pred_iterator PI = pred_begin(LoadBB), E = pred_end(LoadBB);
1580 PI != E; ++PI) {
Bob Wilsonaeeef532010-02-01 21:17:14 +00001581 BasicBlock *Pred = *PI;
1582 if (IsValueFullyAvailableInBlock(Pred, FullyAvailableBlocks)) {
Chris Lattnerdcded152008-12-02 08:16:11 +00001583 continue;
Bob Wilsonaeeef532010-02-01 21:17:14 +00001584 }
1585 PredLoads[Pred] = 0;
1586 // We don't currently handle critical edges :(
1587 if (Pred->getTerminator()->getNumSuccessors() != 1) {
1588 DEBUG(dbgs() << "COULD NOT PRE LOAD BECAUSE OF CRITICAL EDGE '"
1589 << Pred->getName() << "': " << *LI << '\n');
Chris Lattnerdcded152008-12-02 08:16:11 +00001590 return false;
Bob Wilsonaeeef532010-02-01 21:17:14 +00001591 }
Chris Lattnerdcded152008-12-02 08:16:11 +00001592 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001593
Bob Wilsonaeeef532010-02-01 21:17:14 +00001594 // Decide whether PRE is profitable for this load.
1595 unsigned NumUnavailablePreds = PredLoads.size();
1596 assert(NumUnavailablePreds != 0 &&
Chris Lattnerdcded152008-12-02 08:16:11 +00001597 "Fully available value should be eliminated above!");
Bob Wilsonaeeef532010-02-01 21:17:14 +00001598 if (!EnableFullLoadPRE) {
1599 // If this load is unavailable in multiple predecessors, reject it.
1600 // FIXME: If we could restructure the CFG, we could make a common pred with
1601 // all the preds that don't have an available LI and insert a new load into
1602 // that one block.
1603 if (NumUnavailablePreds != 1)
1604 return false;
Owen Anderson5b299672007-08-07 23:12:31 +00001605 }
Bob Wilsonaeeef532010-02-01 21:17:14 +00001606
1607 // Check if the load can safely be moved to all the unavailable predecessors.
1608 bool CanDoPRE = true;
Chris Lattner1c2de2b2009-11-28 15:39:14 +00001609 SmallVector<Instruction*, 8> NewInsts;
Bob Wilsonaeeef532010-02-01 21:17:14 +00001610 for (DenseMap<BasicBlock*, Value*>::iterator I = PredLoads.begin(),
1611 E = PredLoads.end(); I != E; ++I) {
1612 BasicBlock *UnavailablePred = I->first;
1613
1614 // Do PHI translation to get its value in the predecessor if necessary. The
1615 // returned pointer (if non-null) is guaranteed to dominate UnavailablePred.
1616
1617 // If all preds have a single successor, then we know it is safe to insert
1618 // the load on the pred (?!?), so we can insert code to materialize the
1619 // pointer if it is not available.
1620 PHITransAddr Address(LI->getOperand(0), TD);
1621 Value *LoadPtr = 0;
1622 if (allSingleSucc) {
1623 LoadPtr = Address.PHITranslateWithInsertion(LoadBB, UnavailablePred,
1624 *DT, NewInsts);
1625 } else {
1626 Address.PHITranslateValue(LoadBB, UnavailablePred);
1627 LoadPtr = Address.getAddr();
Chris Lattnerefff3222009-12-09 01:59:31 +00001628
Bob Wilsonaeeef532010-02-01 21:17:14 +00001629 // Make sure the value is live in the predecessor.
1630 if (Instruction *Inst = dyn_cast_or_null<Instruction>(LoadPtr))
1631 if (!DT->dominates(Inst->getParent(), UnavailablePred))
1632 LoadPtr = 0;
1633 }
1634
1635 // If we couldn't find or insert a computation of this phi translated value,
1636 // we fail PRE.
1637 if (LoadPtr == 0) {
1638 DEBUG(dbgs() << "COULDN'T INSERT PHI TRANSLATED VALUE OF: "
1639 << *LI->getOperand(0) << "\n");
1640 CanDoPRE = false;
1641 break;
1642 }
1643
1644 // Make sure it is valid to move this load here. We have to watch out for:
1645 // @1 = getelementptr (i8* p, ...
1646 // test p and branch if == 0
1647 // load @1
1648 // It is valid to have the getelementptr before the test, even if p can be 0,
1649 // as getelementptr only does address arithmetic.
1650 // If we are not pushing the value through any multiple-successor blocks
1651 // we do not have this case. Otherwise, check that the load is safe to
1652 // put anywhere; this can be improved, but should be conservatively safe.
1653 if (!allSingleSucc &&
1654 // FIXME: REEVALUATE THIS.
1655 !isSafeToLoadUnconditionally(LoadPtr,
1656 UnavailablePred->getTerminator(),
1657 LI->getAlignment(), TD)) {
1658 CanDoPRE = false;
1659 break;
1660 }
1661
1662 I->second = LoadPtr;
Chris Lattnerefff3222009-12-09 01:59:31 +00001663 }
1664
Bob Wilsonaeeef532010-02-01 21:17:14 +00001665 if (!CanDoPRE) {
1666 while (!NewInsts.empty())
1667 NewInsts.pop_back_val()->eraseFromParent();
Dale Johannesena19b67f2009-06-17 20:48:23 +00001668 return false;
Chris Lattner80c535b2009-11-28 16:08:18 +00001669 }
Dale Johannesena19b67f2009-06-17 20:48:23 +00001670
Chris Lattnerdcded152008-12-02 08:16:11 +00001671 // Okay, we can eliminate this load by inserting a reload in the predecessor
1672 // and using PHI construction to get the value in the other predecessors, do
1673 // it.
David Greeneac297082010-01-05 01:27:17 +00001674 DEBUG(dbgs() << "GVN REMOVING PRE LOAD: " << *LI << '\n');
Chris Lattner80c535b2009-11-28 16:08:18 +00001675 DEBUG(if (!NewInsts.empty())
David Greeneac297082010-01-05 01:27:17 +00001676 dbgs() << "INSERTED " << NewInsts.size() << " INSTS: "
Chris Lattner80c535b2009-11-28 16:08:18 +00001677 << *NewInsts.back() << '\n');
1678
Bob Wilsonaeeef532010-02-01 21:17:14 +00001679 // Assign value numbers to the new instructions.
1680 for (unsigned i = 0, e = NewInsts.size(); i != e; ++i) {
1681 // FIXME: We really _ought_ to insert these value numbers into their
1682 // parent's availability map. However, in doing so, we risk getting into
1683 // ordering issues. If a block hasn't been processed yet, we would be
1684 // marking a value as AVAIL-IN, which isn't what we intend.
1685 VN.lookup_or_add(NewInsts[i]);
1686 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001687
Bob Wilsonaeeef532010-02-01 21:17:14 +00001688 for (DenseMap<BasicBlock*, Value*>::iterator I = PredLoads.begin(),
1689 E = PredLoads.end(); I != E; ++I) {
1690 BasicBlock *UnavailablePred = I->first;
1691 Value *LoadPtr = I->second;
1692
1693 Value *NewLoad = new LoadInst(LoadPtr, LI->getName()+".pre", false,
1694 LI->getAlignment(),
1695 UnavailablePred->getTerminator());
1696
1697 // Add the newly created load.
1698 ValuesPerBlock.push_back(AvailableValueInBlock::get(UnavailablePred,
1699 NewLoad));
1700 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001701
Chris Lattnerdcded152008-12-02 08:16:11 +00001702 // Perform PHI construction.
Chris Lattner1c8dc432009-12-21 23:15:48 +00001703 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD, *DT,
Chris Lattner6e5ea272009-10-10 23:50:30 +00001704 VN.getAliasAnalysis());
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001705 LI->replaceAllUsesWith(V);
1706 if (isa<PHINode>(V))
1707 V->takeName(LI);
1708 if (isa<PointerType>(V->getType()))
1709 MD->invalidateCachedPointerInfo(V);
Chris Lattnerdcded152008-12-02 08:16:11 +00001710 toErase.push_back(LI);
1711 NumPRELoad++;
Owen Anderson5d72a422007-07-25 19:57:03 +00001712 return true;
1713}
1714
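// In IR terms, the load PRE performed above turns (names made up):
//
//   then:                                 ; value available here via the store
//     store i32 %v, i32* %P
//     br label %merge
//   else:                                 ; value not available here
//     br label %merge
//   merge:
//     %x = load i32* %P
//
// into, roughly,
//
//   else:
//     %x.pre = load i32* %P               ; reload inserted in the single
//     br label %merge                     ; unavailable predecessor
//   merge:
//     %x = phi i32 [ %v, %then ], [ %x.pre, %else ]
//
// so the original load goes away and no path executes more loads than before.
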
Owen Andersone0143452007-08-16 22:02:55 +00001715/// processLoad - Attempt to eliminate a load, first by eliminating it
1716/// locally, and then attempting non-local elimination if that fails.
Chris Lattner4531da82008-12-05 21:04:20 +00001717bool GVN::processLoad(LoadInst *L, SmallVectorImpl<Instruction*> &toErase) {
Dan Gohmanc8d26652009-11-14 02:27:51 +00001718 if (!MD)
1719 return false;
1720
Chris Lattner4531da82008-12-05 21:04:20 +00001721 if (L->isVolatile())
Owen Anderson85c40642007-07-24 17:55:58 +00001722 return false;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001723
Owen Anderson85c40642007-07-24 17:55:58 +00001724 // Find the local dependency of this load.
Chris Lattnerff36c952009-09-21 02:42:51 +00001725 MemDepResult Dep = MD->getDependency(L);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001726
Chris Lattner4531da82008-12-05 21:04:20 +00001727 // If the value isn't available, don't do anything!
Chris Lattnerff36c952009-09-21 02:42:51 +00001728 if (Dep.isClobber()) {
Chris Lattner0907b522009-09-21 05:57:11 +00001729 // Check to see if we have something like this:
Chris Lattner7741aa52009-09-20 19:03:47 +00001730 // store i32 123, i32* %P
1731 // %A = bitcast i32* %P to i8*
1732 // %B = gep i8* %A, i32 1
1733 // %C = load i8* %B
1734 //
1735 // We could do that by recognizing if the clobber instructions are obviously
1736 // a common base + constant offset, and if the previous store (or memset)
1737 // completely covers this load. This sort of thing can happen in bitfield
1738 // access code.
Chris Lattnercb00f732009-12-06 01:57:02 +00001739 Value *AvailVal = 0;
Chris Lattner0907b522009-09-21 05:57:11 +00001740 if (StoreInst *DepSI = dyn_cast<StoreInst>(Dep.getInst()))
Chris Lattner41eb59c2009-09-21 06:22:46 +00001741 if (const TargetData *TD = getAnalysisIfAvailable<TargetData>()) {
Chris Lattnerfd9feaa2009-12-09 07:37:07 +00001742 int Offset = AnalyzeLoadFromClobberingStore(L->getType(),
1743 L->getPointerOperand(),
1744 DepSI, *TD);
Chris Lattnercb00f732009-12-06 01:57:02 +00001745 if (Offset != -1)
1746 AvailVal = GetStoreValueForLoad(DepSI->getOperand(0), Offset,
1747 L->getType(), L, *TD);
Chris Lattner41eb59c2009-09-21 06:22:46 +00001748 }
Chris Lattner0907b522009-09-21 05:57:11 +00001749
Chris Lattnercb00f732009-12-06 01:57:02 +00001750 // If the clobbering value is a memset/memcpy/memmove, see if we can forward
1751 // a value on from it.
1752 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(Dep.getInst())) {
1753 if (const TargetData *TD = getAnalysisIfAvailable<TargetData>()) {
Chris Lattnerfd9feaa2009-12-09 07:37:07 +00001754 int Offset = AnalyzeLoadFromClobberingMemInst(L->getType(),
1755 L->getPointerOperand(),
1756 DepMI, *TD);
Chris Lattnercb00f732009-12-06 01:57:02 +00001757 if (Offset != -1)
1758 AvailVal = GetMemInstValueForLoad(DepMI, Offset, L->getType(), L,*TD);
1759 }
1760 }
1761
1762 if (AvailVal) {
David Greeneac297082010-01-05 01:27:17 +00001763 DEBUG(dbgs() << "GVN COERCED INST:\n" << *Dep.getInst() << '\n'
Chris Lattnercb00f732009-12-06 01:57:02 +00001764 << *AvailVal << '\n' << *L << "\n\n\n");
1765
1766 // Replace the load!
1767 L->replaceAllUsesWith(AvailVal);
1768 if (isa<PointerType>(AvailVal->getType()))
1769 MD->invalidateCachedPointerInfo(AvailVal);
1770 toErase.push_back(L);
1771 NumGVNLoad++;
1772 return true;
1773 }
1774
Edwin Török47cf8842009-05-29 09:46:03 +00001775 DEBUG(
1776 // Fast-print the dep; using operator<< on the instruction would be too slow.
David Greeneac297082010-01-05 01:27:17 +00001777 dbgs() << "GVN: load ";
1778 WriteAsOperand(dbgs(), L);
Chris Lattnerff36c952009-09-21 02:42:51 +00001779 Instruction *I = Dep.getInst();
David Greeneac297082010-01-05 01:27:17 +00001780 dbgs() << " is clobbered by " << *I << '\n';
Edwin Török47cf8842009-05-29 09:46:03 +00001781 );
Chris Lattner4531da82008-12-05 21:04:20 +00001782 return false;
Edwin Török47cf8842009-05-29 09:46:03 +00001783 }
Chris Lattner4531da82008-12-05 21:04:20 +00001784
1785 // If it is defined in another block, try harder.
Chris Lattnerff36c952009-09-21 02:42:51 +00001786 if (Dep.isNonLocal())
Chris Lattner4531da82008-12-05 21:04:20 +00001787 return processNonLocalLoad(L, toErase);
Eli Friedman350307f2008-02-12 12:08:14 +00001788
Chris Lattnerff36c952009-09-21 02:42:51 +00001789 Instruction *DepInst = Dep.getInst();
Chris Lattner4531da82008-12-05 21:04:20 +00001790 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInst)) {
Chris Lattner7741aa52009-09-20 19:03:47 +00001791 Value *StoredVal = DepSI->getOperand(0);
1792
1793 // The store and load are to a must-aliased pointer, but they may not
1794 // actually have the same type. See if we know how to reuse the stored
1795 // value (depending on its type).
1796 const TargetData *TD = 0;
Chris Lattner10460aa2009-10-21 04:11:19 +00001797 if (StoredVal->getType() != L->getType()) {
1798 if ((TD = getAnalysisIfAvailable<TargetData>())) {
1799 StoredVal = CoerceAvailableValueToLoadType(StoredVal, L->getType(),
1800 L, *TD);
1801 if (StoredVal == 0)
1802 return false;
1803
David Greeneac297082010-01-05 01:27:17 +00001804 DEBUG(dbgs() << "GVN COERCED STORE:\n" << *DepSI << '\n' << *StoredVal
Chris Lattner10460aa2009-10-21 04:11:19 +00001805 << '\n' << *L << "\n\n\n");
1806 }
1807 else
Chris Lattner7741aa52009-09-20 19:03:47 +00001808 return false;
Chris Lattner7741aa52009-09-20 19:03:47 +00001809 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001810
Chris Lattner4531da82008-12-05 21:04:20 +00001811 // Remove it!
Chris Lattner7741aa52009-09-20 19:03:47 +00001812 L->replaceAllUsesWith(StoredVal);
1813 if (isa<PointerType>(StoredVal->getType()))
1814 MD->invalidateCachedPointerInfo(StoredVal);
Chris Lattner4531da82008-12-05 21:04:20 +00001815 toErase.push_back(L);
1816 NumGVNLoad++;
1817 return true;
1818 }
1819
1820 if (LoadInst *DepLI = dyn_cast<LoadInst>(DepInst)) {
Chris Lattner7741aa52009-09-20 19:03:47 +00001821 Value *AvailableVal = DepLI;
1822
1823 // The loads are of a must-aliased pointer, but they may not actually have
1824 // the same type. See if we know how to reuse the previously loaded value
1825 // (depending on its type).
1826 const TargetData *TD = 0;
Chris Lattner10460aa2009-10-21 04:11:19 +00001827 if (DepLI->getType() != L->getType()) {
1828 if ((TD = getAnalysisIfAvailable<TargetData>())) {
1829 AvailableVal = CoerceAvailableValueToLoadType(DepLI, L->getType(), L,*TD);
1830 if (AvailableVal == 0)
1831 return false;
Chris Lattner7741aa52009-09-20 19:03:47 +00001832
David Greeneac297082010-01-05 01:27:17 +00001833 DEBUG(dbgs() << "GVN COERCED LOAD:\n" << *DepLI << "\n" << *AvailableVal
Chris Lattner10460aa2009-10-21 04:11:19 +00001834 << "\n" << *L << "\n\n\n");
1835 }
1836 else
1837 return false;
Chris Lattner7741aa52009-09-20 19:03:47 +00001838 }
1839
Chris Lattner4531da82008-12-05 21:04:20 +00001840 // Remove it!
Chris Lattner7741aa52009-09-20 19:03:47 +00001841 L->replaceAllUsesWith(AvailableVal);
Chris Lattnerf81b0142008-12-09 22:06:23 +00001842 if (isa<PointerType>(DepLI->getType()))
1843 MD->invalidateCachedPointerInfo(DepLI);
Chris Lattner4531da82008-12-05 21:04:20 +00001844 toErase.push_back(L);
1845 NumGVNLoad++;
1846 return true;
1847 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001848
Chris Lattner8ea60462008-11-30 01:39:32 +00001849 // If this load really doesn't depend on anything, then we must be loading an
1850 // undef value. This can happen when loading from a fresh allocation with no
1851 // intervening stores, for example.
Victor Hernandezb1687302009-10-23 21:09:37 +00001852 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst)) {
Owen Andersonb99ecca2009-07-30 23:03:37 +00001853 L->replaceAllUsesWith(UndefValue::get(L->getType()));
Chris Lattner8ea60462008-11-30 01:39:32 +00001854 toErase.push_back(L);
Chris Lattner8ea60462008-11-30 01:39:32 +00001855 NumGVNLoad++;
Chris Lattner4531da82008-12-05 21:04:20 +00001856 return true;
Eli Friedman350307f2008-02-12 12:08:14 +00001857 }
Owen Andersonc07861a2009-10-28 07:05:35 +00001858
Owen Andersonf187daf2009-12-02 07:35:19 +00001859 // If this load occurs right after a lifetime begin,
Owen Andersonc07861a2009-10-28 07:05:35 +00001860 // then the loaded value is undefined.
1861 if (IntrinsicInst* II = dyn_cast<IntrinsicInst>(DepInst)) {
Owen Andersonf187daf2009-12-02 07:35:19 +00001862 if (II->getIntrinsicID() == Intrinsic::lifetime_start) {
Owen Andersonc07861a2009-10-28 07:05:35 +00001863 L->replaceAllUsesWith(UndefValue::get(L->getType()));
1864 toErase.push_back(L);
1865 NumGVNLoad++;
1866 return true;
1867 }
1868 }
Eli Friedman350307f2008-02-12 12:08:14 +00001869
Chris Lattner4531da82008-12-05 21:04:20 +00001870 return false;
Owen Anderson85c40642007-07-24 17:55:58 +00001871}
1872
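// Worked example of the clobber coercion above, using the bitfield-style case
// from the comment in processLoad:
//   store i32 123, i32* %P
//   %B = gep i8* %A, i32 1                ; one byte past %P
//   %C = load i8* %B
// AnalyzeLoadFromClobberingStore reports Offset == 1, and GetStoreValueForLoad
// then extracts byte 1 of the stored i32 (0 here, since 123 is 0x0000007B on a
// little-endian target), so %C is replaced without touching memory.
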
Chris Lattnerff36c952009-09-21 02:42:51 +00001873Value *GVN::lookupNumber(BasicBlock *BB, uint32_t num) {
Owen Andersonaef6a922008-06-23 17:49:45 +00001874 DenseMap<BasicBlock*, ValueNumberScope*>::iterator I = localAvail.find(BB);
1875 if (I == localAvail.end())
1876 return 0;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001877
Chris Lattnerff36c952009-09-21 02:42:51 +00001878 ValueNumberScope *Locals = I->second;
1879 while (Locals) {
1880 DenseMap<uint32_t, Value*>::iterator I = Locals->table.find(num);
1881 if (I != Locals->table.end())
Owen Anderson2a412722008-06-20 01:15:47 +00001882 return I->second;
Chris Lattnerff36c952009-09-21 02:42:51 +00001883 Locals = Locals->parent;
Owen Anderson2a412722008-06-20 01:15:47 +00001884 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001885
Owen Anderson2a412722008-06-20 01:15:47 +00001886 return 0;
1887}
1888
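// The ValueNumberScope chain walked above lets a block see values recorded by
// the scopes it is nested in (its dominators, per the comments further down).
// A minimal standalone sketch of the same lookup pattern, with illustrative
// types that are not used by this pass:
struct ExampleScope {
  DenseMap<uint32_t, int> Table;
  ExampleScope *Parent;
};
static int *ExampleScopedLookup(ExampleScope *S, uint32_t Num) {
  for (; S; S = S->Parent) {              // Walk outward toward the root scope.
    DenseMap<uint32_t, int>::iterator I = S->Table.find(Num);
    if (I != S->Table.end())
      return &I->second;
  }
  return 0;                               // Not available in any enclosing scope.
}
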
Owen Andersona03e7862008-12-15 02:03:00 +00001889
Owen Andersonf631bb62007-08-14 18:16:29 +00001890/// processInstruction - When calculating availability, handle an instruction
Owen Anderson85c40642007-07-24 17:55:58 +00001891/// by inserting it into the appropriate sets
Owen Anderson9334fc62008-06-12 19:25:32 +00001892bool GVN::processInstruction(Instruction *I,
Chris Lattner7de20452008-03-21 22:01:16 +00001893 SmallVectorImpl<Instruction*> &toErase) {
Chris Lattnerff36c952009-09-21 02:42:51 +00001894 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
1895 bool Changed = processLoad(LI, toErase);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001896
Chris Lattnerff36c952009-09-21 02:42:51 +00001897 if (!Changed) {
1898 unsigned Num = VN.lookup_or_add(LI);
1899 localAvail[I->getParent()]->table.insert(std::make_pair(Num, LI));
Owen Andersone6b4ff82008-06-18 21:41:49 +00001900 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001901
Chris Lattnerff36c952009-09-21 02:42:51 +00001902 return Changed;
Owen Andersone6b4ff82008-06-18 21:41:49 +00001903 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001904
Chris Lattnerff36c952009-09-21 02:42:51 +00001905 uint32_t NextNum = VN.getNextUnusedValueNumber();
1906 unsigned Num = VN.lookup_or_add(I);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001907
Chris Lattnerff36c952009-09-21 02:42:51 +00001908 if (BranchInst *BI = dyn_cast<BranchInst>(I)) {
1909 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001910
Owen Andersonef8bf0f2009-04-01 23:53:49 +00001911 if (!BI->isConditional() || isa<Constant>(BI->getCondition()))
1912 return false;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001913
Chris Lattnerff36c952009-09-21 02:42:51 +00001914 Value *BranchCond = BI->getCondition();
1915 uint32_t CondVN = VN.lookup_or_add(BranchCond);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001916
Chris Lattnerff36c952009-09-21 02:42:51 +00001917 BasicBlock *TrueSucc = BI->getSuccessor(0);
1918 BasicBlock *FalseSucc = BI->getSuccessor(1);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001919
Chris Lattnerff36c952009-09-21 02:42:51 +00001920 if (TrueSucc->getSinglePredecessor())
1921 localAvail[TrueSucc]->table[CondVN] =
1922 ConstantInt::getTrue(TrueSucc->getContext());
1923 if (FalseSucc->getSinglePredecessor())
1924 localAvail[FalseSucc]->table[CondVN] =
1925 ConstantInt::getFalse(TrueSucc->getContext());
Owen Andersonef8bf0f2009-04-01 23:53:49 +00001926
1927 return false;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001928
Owen Andersonced50f82008-04-07 09:59:07 +00001929 // Allocations are always uniquely numbered, so we can save time and memory
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001930 // by fast failing them.
Victor Hernandezb1687302009-10-23 21:09:37 +00001931 } else if (isa<AllocaInst>(I) || isa<TerminatorInst>(I)) {
Chris Lattnerff36c952009-09-21 02:42:51 +00001932 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Andersonced50f82008-04-07 09:59:07 +00001933 return false;
Owen Andersone6b4ff82008-06-18 21:41:49 +00001934 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001935
Owen Andersone0143452007-08-16 22:02:55 +00001936 // Collapse PHI nodes
Owen Anderson98f6a6b2007-08-14 18:33:27 +00001937 if (PHINode* p = dyn_cast<PHINode>(I)) {
Chris Lattnerff36c952009-09-21 02:42:51 +00001938 Value *constVal = CollapsePhi(p);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001939
Owen Anderson98f6a6b2007-08-14 18:33:27 +00001940 if (constVal) {
Owen Andersone02ad522007-08-16 22:51:56 +00001941 p->replaceAllUsesWith(constVal);
Dan Gohmanc8d26652009-11-14 02:27:51 +00001942 if (MD && isa<PointerType>(constVal->getType()))
Chris Lattnerf81b0142008-12-09 22:06:23 +00001943 MD->invalidateCachedPointerInfo(constVal);
Owen Anderson575f2812008-12-23 00:49:51 +00001944 VN.erase(p);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001945
Owen Andersone02ad522007-08-16 22:51:56 +00001946 toErase.push_back(p);
Owen Andersone6b4ff82008-06-18 21:41:49 +00001947 } else {
Chris Lattnerff36c952009-09-21 02:42:51 +00001948 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Anderson98f6a6b2007-08-14 18:33:27 +00001949 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001950
Owen Anderson8a8d13c2008-07-03 17:44:33 +00001951 // If the number we were assigned was a brand new VN, then we don't
1952 // need to do a lookup to see if the number already exists
1953 // somewhere in the domtree: it can't!
Chris Lattnerff36c952009-09-21 02:42:51 +00001954 } else if (Num == NextNum) {
1955 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001956
Owen Andersona03e7862008-12-15 02:03:00 +00001957 // Perform fast-path value-number based elimination of values inherited from
1958 // dominators.
Chris Lattnerff36c952009-09-21 02:42:51 +00001959 } else if (Value *repl = lookupNumber(I->getParent(), Num)) {
Owen Andersonc772be72007-12-08 01:37:09 +00001960 // Remove it!
Owen Anderson5aff8002007-07-31 23:27:13 +00001961 VN.erase(I);
Owen Anderson85c40642007-07-24 17:55:58 +00001962 I->replaceAllUsesWith(repl);
Dan Gohmanc8d26652009-11-14 02:27:51 +00001963 if (MD && isa<PointerType>(repl->getType()))
Chris Lattnerf81b0142008-12-09 22:06:23 +00001964 MD->invalidateCachedPointerInfo(repl);
Owen Anderson85c40642007-07-24 17:55:58 +00001965 toErase.push_back(I);
1966 return true;
Owen Andersona03e7862008-12-15 02:03:00 +00001967
Owen Anderson8a8d13c2008-07-03 17:44:33 +00001968 } else {
Chris Lattnerff36c952009-09-21 02:42:51 +00001969 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Anderson85c40642007-07-24 17:55:58 +00001970 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001971
Owen Anderson85c40642007-07-24 17:55:58 +00001972 return false;
1973}
1974
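// Example of the branch-condition propagation above: given
//   %cmp = icmp eq i32 %x, 0
//   br i1 %cmp, label %T, label %F
// the value number of %cmp maps to true in %T's local table and to false in
// %F's (when each successor has a single predecessor), so a later instruction
// in those scopes that gets the same value number can be folded to the
// constant by the value-number lookup above. (Illustrative IR only.)
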
Bill Wendling42f17f62008-12-22 22:32:22 +00001975/// runOnFunction - This is the main transformation entry point for a function.
Owen Andersonbe168b32007-08-14 18:04:11 +00001976bool GVN::runOnFunction(Function& F) {
Dan Gohmanc8d26652009-11-14 02:27:51 +00001977 if (!NoLoads)
1978 MD = &getAnalysis<MemoryDependenceAnalysis>();
Chris Lattner02ca4422008-12-01 00:40:32 +00001979 DT = &getAnalysis<DominatorTree>();
Owen Andersonbcf2bd52008-05-12 20:15:55 +00001980 VN.setAliasAnalysis(&getAnalysis<AliasAnalysis>());
Chris Lattner02ca4422008-12-01 00:40:32 +00001981 VN.setMemDep(MD);
1982 VN.setDomTree(DT);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001983
Chris Lattnerff36c952009-09-21 02:42:51 +00001984 bool Changed = false;
1985 bool ShouldContinue = true;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001986
Owen Anderson26ed2572008-07-16 17:52:31 +00001987 // Merge unconditional branches, allowing PRE to catch more
1988 // optimization opportunities.
1989 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ) {
Chris Lattnerff36c952009-09-21 02:42:51 +00001990 BasicBlock *BB = FI;
Owen Anderson26ed2572008-07-16 17:52:31 +00001991 ++FI;
Owen Andersonf59eef82008-07-17 00:01:40 +00001992 bool removedBlock = MergeBlockIntoPredecessor(BB, this);
1993 if (removedBlock) NumGVNBlocks++;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001994
Chris Lattnerff36c952009-09-21 02:42:51 +00001995 Changed |= removedBlock;
Owen Anderson26ed2572008-07-16 17:52:31 +00001996 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001997
Chris Lattner4bab29b2008-12-09 19:21:47 +00001998 unsigned Iteration = 0;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001999
Chris Lattnerff36c952009-09-21 02:42:51 +00002000 while (ShouldContinue) {
David Greeneac297082010-01-05 01:27:17 +00002001 DEBUG(dbgs() << "GVN iteration: " << Iteration << "\n");
Chris Lattnerff36c952009-09-21 02:42:51 +00002002 ShouldContinue = iterateOnFunction(F);
2003 Changed |= ShouldContinue;
Chris Lattner4bab29b2008-12-09 19:21:47 +00002004 ++Iteration;
Owen Andersonbe168b32007-08-14 18:04:11 +00002005 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002006
Owen Anderson916f4732008-07-18 18:03:38 +00002007 if (EnablePRE) {
Owen Anderson9c935902008-09-03 23:06:07 +00002008 bool PREChanged = true;
2009 while (PREChanged) {
2010 PREChanged = performPRE(F);
Chris Lattnerff36c952009-09-21 02:42:51 +00002011 Changed |= PREChanged;
Owen Anderson9c935902008-09-03 23:06:07 +00002012 }
Owen Anderson916f4732008-07-18 18:03:38 +00002013 }
Chris Lattner4bab29b2008-12-09 19:21:47 +00002014 // FIXME: Should perform GVN again after PRE does something. PRE can move
2015 // computations into blocks where they become fully redundant. Note that
2016 // we can't do this until PRE's critical edge splitting updates memdep.
2017 // Actually, when this happens, we should just fully integrate PRE into GVN.
Nuno Lopes274474b2008-10-10 16:25:50 +00002018
2019 cleanupGlobalSets();
2020
Chris Lattnerff36c952009-09-21 02:42:51 +00002021 return Changed;
Owen Andersonbe168b32007-08-14 18:04:11 +00002022}
2023
2024
Chris Lattnerff36c952009-09-21 02:42:51 +00002025bool GVN::processBlock(BasicBlock *BB) {
Chris Lattner4bab29b2008-12-09 19:21:47 +00002026 // FIXME: Kill off toErase by erasing eagerly in a helper function (and
2027 // incrementing BI before processing an instruction).
Owen Anderson9334fc62008-06-12 19:25:32 +00002028 SmallVector<Instruction*, 8> toErase;
Chris Lattnerff36c952009-09-21 02:42:51 +00002029 bool ChangedFunction = false;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002030
Owen Anderson9334fc62008-06-12 19:25:32 +00002031 for (BasicBlock::iterator BI = BB->begin(), BE = BB->end();
2032 BI != BE;) {
Chris Lattnerff36c952009-09-21 02:42:51 +00002033 ChangedFunction |= processInstruction(BI, toErase);
Owen Anderson9334fc62008-06-12 19:25:32 +00002034 if (toErase.empty()) {
2035 ++BI;
2036 continue;
2037 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002038
Owen Anderson9334fc62008-06-12 19:25:32 +00002039 // If we need some instructions deleted, do it now.
2040 NumGVNInstr += toErase.size();
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002041
Owen Anderson9334fc62008-06-12 19:25:32 +00002042 // Avoid iterator invalidation.
2043 bool AtStart = BI == BB->begin();
2044 if (!AtStart)
2045 --BI;
2046
2047 for (SmallVector<Instruction*, 4>::iterator I = toErase.begin(),
Chris Lattner02ca4422008-12-01 00:40:32 +00002048 E = toErase.end(); I != E; ++I) {
David Greeneac297082010-01-05 01:27:17 +00002049 DEBUG(dbgs() << "GVN removed: " << **I << '\n');
Dan Gohmanc8d26652009-11-14 02:27:51 +00002050 if (MD) MD->removeInstruction(*I);
Owen Anderson9334fc62008-06-12 19:25:32 +00002051 (*I)->eraseFromParent();
Bill Wendling84049422008-12-22 21:57:30 +00002052 DEBUG(verifyRemoved(*I));
Chris Lattner02ca4422008-12-01 00:40:32 +00002053 }
Chris Lattner4bab29b2008-12-09 19:21:47 +00002054 toErase.clear();
Owen Anderson9334fc62008-06-12 19:25:32 +00002055
2056 if (AtStart)
2057 BI = BB->begin();
2058 else
2059 ++BI;
Owen Anderson9334fc62008-06-12 19:25:32 +00002060 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002061
Chris Lattnerff36c952009-09-21 02:42:51 +00002062 return ChangedFunction;
Owen Anderson9334fc62008-06-12 19:25:32 +00002063}
2064
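// The AtStart/--BI/++BI bookkeeping above is the usual way to keep a list
// iterator valid while the current instruction is erased. The same idea as a
// small standalone helper, assuming a node-based list (such as std::list or
// the instruction list) where erasing one node only invalidates iterators to
// that node; the helper is illustrative only and is not used by this pass:
template <typename ListTy>
static typename ListTy::iterator ExampleEraseCurrent(ListTy &List,
                                                     typename ListTy::iterator BI) {
  bool AtStart = BI == List.begin();
  typename ListTy::iterator Prev = BI;
  if (!AtStart)
    --Prev;                               // Remember the element just before BI.
  List.erase(BI);                         // Only iterators to this node go stale.
  if (AtStart)
    return List.begin();                  // Resume from the new first element.
  return ++Prev;                          // Resume just after the erased element.
}
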
Owen Andersone6b4ff82008-06-18 21:41:49 +00002065/// performPRE - Perform a purely local form of PRE that looks for diamond
2066/// control flow patterns and attempts to perform simple PRE at the join point.
Chris Lattner4790cb42009-10-31 22:11:15 +00002067bool GVN::performPRE(Function &F) {
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002068 bool Changed = false;
Owen Andersonec747c42008-06-19 19:54:19 +00002069 SmallVector<std::pair<TerminatorInst*, unsigned>, 4> toSplit;
Chris Lattner3304b562008-12-01 07:29:03 +00002070 DenseMap<BasicBlock*, Value*> predMap;
Owen Andersone6b4ff82008-06-18 21:41:49 +00002071 for (df_iterator<BasicBlock*> DI = df_begin(&F.getEntryBlock()),
2072 DE = df_end(&F.getEntryBlock()); DI != DE; ++DI) {
Chris Lattnerff36c952009-09-21 02:42:51 +00002073 BasicBlock *CurrentBlock = *DI;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002074
Owen Andersone6b4ff82008-06-18 21:41:49 +00002075 // Nothing to PRE in the entry block.
2076 if (CurrentBlock == &F.getEntryBlock()) continue;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002077
Owen Andersone6b4ff82008-06-18 21:41:49 +00002078 for (BasicBlock::iterator BI = CurrentBlock->begin(),
2079 BE = CurrentBlock->end(); BI != BE; ) {
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002080 Instruction *CurInst = BI++;
Duncan Sands2f500832009-05-06 06:49:50 +00002081
Victor Hernandezb1687302009-10-23 21:09:37 +00002082 if (isa<AllocaInst>(CurInst) ||
Victor Hernandez48c3c542009-09-18 22:35:49 +00002083 isa<TerminatorInst>(CurInst) || isa<PHINode>(CurInst) ||
Devang Patele9d08b82009-10-14 17:29:00 +00002084 CurInst->getType()->isVoidTy() ||
Duncan Sands2f500832009-05-06 06:49:50 +00002085 CurInst->mayReadFromMemory() || CurInst->mayHaveSideEffects() ||
John Criswell6e0aa282009-03-10 15:04:53 +00002086 isa<DbgInfoIntrinsic>(CurInst))
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002087 continue;
Duncan Sands2f500832009-05-06 06:49:50 +00002088
Chris Lattnerff36c952009-09-21 02:42:51 +00002089 uint32_t ValNo = VN.lookup(CurInst);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002090
Owen Andersone6b4ff82008-06-18 21:41:49 +00002091 // Look for the predecessors for PRE opportunities. We're
2092 // only trying to solve the basic diamond case, where
2093 // a value is computed in the successor and one predecessor,
2094 // but not the other. We also explicitly disallow cases
2095 // where the successor is its own predecessor, because they're
2096 // more complicated to get right.
Chris Lattnerff36c952009-09-21 02:42:51 +00002097 unsigned NumWith = 0;
2098 unsigned NumWithout = 0;
2099 BasicBlock *PREPred = 0;
Chris Lattner3304b562008-12-01 07:29:03 +00002100 predMap.clear();
2101
Owen Andersone6b4ff82008-06-18 21:41:49 +00002102 for (pred_iterator PI = pred_begin(CurrentBlock),
2103 PE = pred_end(CurrentBlock); PI != PE; ++PI) {
2104 // We're not interested in PRE where the block is its
Bob Wilson8998bda2010-02-03 00:33:21 +00002105 // own predecessor, or in blocks with predecessors
Owen Anderson2a412722008-06-20 01:15:47 +00002106 // that are not reachable.
2107 if (*PI == CurrentBlock) {
Chris Lattnerff36c952009-09-21 02:42:51 +00002108 NumWithout = 2;
Owen Anderson2a412722008-06-20 01:15:47 +00002109 break;
2110 } else if (!localAvail.count(*PI)) {
Chris Lattnerff36c952009-09-21 02:42:51 +00002111 NumWithout = 2;
Owen Anderson2a412722008-06-20 01:15:47 +00002112 break;
2113 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002114
2115 DenseMap<uint32_t, Value*>::iterator predV =
Chris Lattnerff36c952009-09-21 02:42:51 +00002116 localAvail[*PI]->table.find(ValNo);
Owen Anderson2a412722008-06-20 01:15:47 +00002117 if (predV == localAvail[*PI]->table.end()) {
Owen Andersone6b4ff82008-06-18 21:41:49 +00002118 PREPred = *PI;
Chris Lattnerff36c952009-09-21 02:42:51 +00002119 NumWithout++;
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002120 } else if (predV->second == CurInst) {
Chris Lattnerff36c952009-09-21 02:42:51 +00002121 NumWithout = 2;
Owen Andersone6b4ff82008-06-18 21:41:49 +00002122 } else {
Owen Anderson2a412722008-06-20 01:15:47 +00002123 predMap[*PI] = predV->second;
Chris Lattnerff36c952009-09-21 02:42:51 +00002124 NumWith++;
Owen Andersone6b4ff82008-06-18 21:41:49 +00002125 }
2126 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002127
Owen Andersone6b4ff82008-06-18 21:41:49 +00002128 // Don't do PRE when it might increase code size, i.e. when
2129 // we would need to insert instructions in more than one pred.
Chris Lattnerff36c952009-09-21 02:42:51 +00002130 if (NumWithout != 1 || NumWith == 0)
Owen Andersone6b4ff82008-06-18 21:41:49 +00002131 continue;
Chris Lattner4790cb42009-10-31 22:11:15 +00002132
2133 // Don't do PRE across indirect branch.
2134 if (isa<IndirectBrInst>(PREPred->getTerminator()))
2135 continue;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002136
Owen Andersonec747c42008-06-19 19:54:19 +00002137 // We can't do PRE safely on a critical edge, so instead we schedule
2138 // the edge to be split and perform the PRE the next time we iterate
2139 // on the function.
Chris Lattnerff36c952009-09-21 02:42:51 +00002140 unsigned SuccNum = 0;
Owen Andersonec747c42008-06-19 19:54:19 +00002141 for (unsigned i = 0, e = PREPred->getTerminator()->getNumSuccessors();
2142 i != e; ++i)
Owen Anderson9c935902008-09-03 23:06:07 +00002143 if (PREPred->getTerminator()->getSuccessor(i) == CurrentBlock) {
Chris Lattnerff36c952009-09-21 02:42:51 +00002144 SuccNum = i;
Owen Andersonec747c42008-06-19 19:54:19 +00002145 break;
2146 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002147
Chris Lattnerff36c952009-09-21 02:42:51 +00002148 if (isCriticalEdge(PREPred->getTerminator(), SuccNum)) {
2149 toSplit.push_back(std::make_pair(PREPred->getTerminator(), SuccNum));
Owen Andersonec747c42008-06-19 19:54:19 +00002150 continue;
2151 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002152
Bob Wilson8998bda2010-02-03 00:33:21 +00002153 // Instantiate the expression in the predecessor that lacked it.
Owen Andersone6b4ff82008-06-18 21:41:49 +00002154 // Because we are going top-down through the block, all value numbers
2155 // will be available in the predecessor by the time we need them. Any
Bob Wilson8998bda2010-02-03 00:33:21 +00002156 // that weren't originally present will have been instantiated earlier
Owen Andersone6b4ff82008-06-18 21:41:49 +00002157 // in this loop.
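      // Each operand of the clone is remapped below to the value that carries
      // the same value number in PREPred's scope; if any operand has no
      // available equivalent there, this PRE attempt is abandoned.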
Nick Lewyckyc94270c2009-09-27 07:38:41 +00002158 Instruction *PREInstr = CurInst->clone();
Owen Andersone6b4ff82008-06-18 21:41:49 +00002159 bool success = true;
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002160 for (unsigned i = 0, e = CurInst->getNumOperands(); i != e; ++i) {
2161 Value *Op = PREInstr->getOperand(i);
2162 if (isa<Argument>(Op) || isa<Constant>(Op) || isa<GlobalValue>(Op))
2163 continue;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002164
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002165 if (Value *V = lookupNumber(PREPred, VN.lookup(Op))) {
2166 PREInstr->setOperand(i, V);
2167 } else {
2168 success = false;
2169 break;
Owen Anderson14c612f2008-07-11 20:05:13 +00002170 }
Owen Andersone6b4ff82008-06-18 21:41:49 +00002171 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002172
Owen Andersone6b4ff82008-06-18 21:41:49 +00002173 // Fail out if we encounter an operand that is not available in
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002174 // the PRE predecessor. This is typically because of loads which
Owen Andersone6b4ff82008-06-18 21:41:49 +00002175 // are not value numbered precisely.
2176 if (!success) {
2177 delete PREInstr;
Bill Wendling3858cae2008-12-22 22:14:07 +00002178 DEBUG(verifyRemoved(PREInstr));
Owen Andersone6b4ff82008-06-18 21:41:49 +00002179 continue;
2180 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002181
Owen Andersone6b4ff82008-06-18 21:41:49 +00002182 PREInstr->insertBefore(PREPred->getTerminator());
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002183 PREInstr->setName(CurInst->getName() + ".pre");
Owen Anderson2a412722008-06-20 01:15:47 +00002184 predMap[PREPred] = PREInstr;
Chris Lattnerff36c952009-09-21 02:42:51 +00002185 VN.add(PREInstr, ValNo);
Owen Andersone6b4ff82008-06-18 21:41:49 +00002186 NumGVNPRE++;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002187
Owen Andersone6b4ff82008-06-18 21:41:49 +00002188 // Update the availability map to include the new instruction.
Chris Lattnerff36c952009-09-21 02:42:51 +00002189 localAvail[PREPred]->table.insert(std::make_pair(ValNo, PREInstr));
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002190
Owen Andersone6b4ff82008-06-18 21:41:49 +00002191 // Create a PHI to make the value available in this block.
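      // Continuing the illustration above (names still made up), the result is
      // roughly:
      //   %v2.pre-phi = phi i32 [ %v1, %BB1 ], [ %v2.pre, %BB2 ]
      // after which every use of the original %v2 is rewritten to the phi.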
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002192 PHINode* Phi = PHINode::Create(CurInst->getType(),
2193 CurInst->getName() + ".pre-phi",
Owen Andersone6b4ff82008-06-18 21:41:49 +00002194 CurrentBlock->begin());
2195 for (pred_iterator PI = pred_begin(CurrentBlock),
2196 PE = pred_end(CurrentBlock); PI != PE; ++PI)
Owen Anderson2a412722008-06-20 01:15:47 +00002197 Phi->addIncoming(predMap[*PI], *PI);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002198
Chris Lattnerff36c952009-09-21 02:42:51 +00002199 VN.add(Phi, ValNo);
2200 localAvail[CurrentBlock]->table[ValNo] = Phi;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002201
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002202 CurInst->replaceAllUsesWith(Phi);
Dan Gohmanc8d26652009-11-14 02:27:51 +00002203 if (MD && isa<PointerType>(Phi->getType()))
Chris Lattnerf81b0142008-12-09 22:06:23 +00002204 MD->invalidateCachedPointerInfo(Phi);
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002205 VN.erase(CurInst);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002206
David Greeneac297082010-01-05 01:27:17 +00002207 DEBUG(dbgs() << "GVN PRE removed: " << *CurInst << '\n');
Dan Gohmanc8d26652009-11-14 02:27:51 +00002208 if (MD) MD->removeInstruction(CurInst);
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002209 CurInst->eraseFromParent();
Bill Wendling84049422008-12-22 21:57:30 +00002210 DEBUG(verifyRemoved(CurInst));
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002211 Changed = true;
Owen Andersone6b4ff82008-06-18 21:41:49 +00002212 }
2213 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002214
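  // Split the critical edges recorded above; the PRE opportunities behind them
  // are picked up the next time we iterate on the function.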
Owen Andersonec747c42008-06-19 19:54:19 +00002215 for (SmallVector<std::pair<TerminatorInst*, unsigned>, 4>::iterator
Anton Korobeynikov2e8710c2008-12-05 19:38:49 +00002216 I = toSplit.begin(), E = toSplit.end(); I != E; ++I)
Owen Andersonec747c42008-06-19 19:54:19 +00002217 SplitCriticalEdge(I->first, I->second, this);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002218
Anton Korobeynikov2e8710c2008-12-05 19:38:49 +00002219 return Changed || toSplit.size();
Owen Andersone6b4ff82008-06-18 21:41:49 +00002220}
2221
Bill Wendling42f17f62008-12-22 22:32:22 +00002222/// iterateOnFunction - Executes one iteration of GVN.
Owen Andersonbe168b32007-08-14 18:04:11 +00002223bool GVN::iterateOnFunction(Function &F) {
Nuno Lopes274474b2008-10-10 16:25:50 +00002224 cleanupGlobalSets();
Chris Lattner98054902008-03-21 21:33:23 +00002225
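  // Build a ValueNumberScope for each block, chained to the scope of the
  // block's immediate dominator, so that availability lookups can walk up the
  // dominator tree.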
Owen Andersonef8bf0f2009-04-01 23:53:49 +00002226 for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
2227 DE = df_end(DT->getRootNode()); DI != DE; ++DI) {
2228 if (DI->getIDom())
2229 localAvail[DI->getBlock()] =
2230 new ValueNumberScope(localAvail[DI->getIDom()->getBlock()]);
2231 else
2232 localAvail[DI->getBlock()] = new ValueNumberScope(0);
2233 }
2234
Owen Anderson85c40642007-07-24 17:55:58 +00002235 // Top-down walk of the dominator tree
Chris Lattnerff36c952009-09-21 02:42:51 +00002236 bool Changed = false;
Owen Andersonef136f52008-12-15 03:52:17 +00002237#if 0
2238 // Needed for value numbering with phi construction to work.
Owen Andersona03e7862008-12-15 02:03:00 +00002239 ReversePostOrderTraversal<Function*> RPOT(&F);
2240 for (ReversePostOrderTraversal<Function*>::rpo_iterator RI = RPOT.begin(),
2241 RE = RPOT.end(); RI != RE; ++RI)
Chris Lattnerff36c952009-09-21 02:42:51 +00002242 Changed |= processBlock(*RI);
Owen Andersonef136f52008-12-15 03:52:17 +00002243#else
2244 for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
2245 DE = df_end(DT->getRootNode()); DI != DE; ++DI)
Chris Lattnerff36c952009-09-21 02:42:51 +00002246 Changed |= processBlock(DI->getBlock());
Owen Andersonef136f52008-12-15 03:52:17 +00002247#endif
2248
Chris Lattnerff36c952009-09-21 02:42:51 +00002249 return Changed;
Owen Anderson85c40642007-07-24 17:55:58 +00002250}
Nuno Lopes274474b2008-10-10 16:25:50 +00002251
2252void GVN::cleanupGlobalSets() {
2253 VN.clear();
Nuno Lopes274474b2008-10-10 16:25:50 +00002254
2255 for (DenseMap<BasicBlock*, ValueNumberScope*>::iterator
2256 I = localAvail.begin(), E = localAvail.end(); I != E; ++I)
2257 delete I->second;
2258 localAvail.clear();
2259}
Bill Wendling2a023742008-12-22 21:36:08 +00002260
2261/// verifyRemoved - Verify that the specified instruction does not occur in our
2262/// internal data structures.
Bill Wendlingf9c0e9e2008-12-22 22:28:56 +00002263void GVN::verifyRemoved(const Instruction *Inst) const {
2264 VN.verifyRemoved(Inst);
Bill Wendling3858cae2008-12-22 22:14:07 +00002265
Bill Wendlingf9c0e9e2008-12-22 22:28:56 +00002266 // Walk through the value number scope to make sure the instruction isn't
2267 // ferreted away in it.
Jeffrey Yasskin8154d2e2009-11-10 01:02:17 +00002268 for (DenseMap<BasicBlock*, ValueNumberScope*>::const_iterator
Bill Wendlingf9c0e9e2008-12-22 22:28:56 +00002269 I = localAvail.begin(), E = localAvail.end(); I != E; ++I) {
2270 const ValueNumberScope *VNS = I->second;
2271
2272 while (VNS) {
Jeffrey Yasskin8154d2e2009-11-10 01:02:17 +00002273 for (DenseMap<uint32_t, Value*>::const_iterator
Bill Wendlingf9c0e9e2008-12-22 22:28:56 +00002274 II = VNS->table.begin(), IE = VNS->table.end(); II != IE; ++II) {
2275 assert(II->second != Inst && "Inst still in value numbering scope!");
2276 }
2277
2278 VNS = VNS->parent;
Bill Wendling3858cae2008-12-22 22:14:07 +00002279 }
2280 }
Bill Wendling2a023742008-12-22 21:36:08 +00002281}