//===- GVN.cpp - Eliminate redundant values and loads --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs global value numbering to eliminate fully redundant
// instructions.  It also performs simple dead load elimination.
//
// Note that this pass does the value numbering itself; it does not use the
// ValueNumbering analysis passes.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "gvn"
#include "llvm/Transforms/Scalar.h"
#include "llvm/BasicBlock.h"
#include "llvm/Constants.h"
#include "llvm/DerivedTypes.h"
#include "llvm/GlobalVariable.h"
#include "llvm/Function.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/LLVMContext.h"
#include "llvm/Operator.h"
#include "llvm/Value.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Analysis/PHITransAddr.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/GetElementPtrTypeIterator.h"
#include "llvm/Support/IRBuilder.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/SSAUpdater.h"
using namespace llvm;

STATISTIC(NumGVNInstr,  "Number of instructions deleted");
STATISTIC(NumGVNLoad,   "Number of loads deleted");
STATISTIC(NumGVNPRE,    "Number of instructions PRE'd");
STATISTIC(NumGVNBlocks, "Number of blocks merged");
STATISTIC(NumPRELoad,   "Number of loads PRE'd");

static cl::opt<bool> EnablePRE("enable-pre",
                               cl::init(true), cl::Hidden);
static cl::opt<bool> EnableLoadPRE("enable-load-pre", cl::init(true));
static cl::opt<bool> EnableFullLoadPRE("enable-full-load-pre", cl::init(false));

//===----------------------------------------------------------------------===//
//                         ValueTable Class
//===----------------------------------------------------------------------===//

/// This class holds the mapping between values and value numbers.  It is used
/// as an efficient mechanism to determine the expression-wise equivalence of
/// two values.
namespace {
  struct Expression {
    enum ExpressionOpcode {
      ADD = Instruction::Add,
      FADD = Instruction::FAdd,
      SUB = Instruction::Sub,
      FSUB = Instruction::FSub,
      MUL = Instruction::Mul,
      FMUL = Instruction::FMul,
      UDIV = Instruction::UDiv,
      SDIV = Instruction::SDiv,
      FDIV = Instruction::FDiv,
      UREM = Instruction::URem,
      SREM = Instruction::SRem,
      FREM = Instruction::FRem,
      SHL = Instruction::Shl,
      LSHR = Instruction::LShr,
      ASHR = Instruction::AShr,
      AND = Instruction::And,
      OR = Instruction::Or,
      XOR = Instruction::Xor,
      TRUNC = Instruction::Trunc,
      ZEXT = Instruction::ZExt,
      SEXT = Instruction::SExt,
      FPTOUI = Instruction::FPToUI,
      FPTOSI = Instruction::FPToSI,
      UITOFP = Instruction::UIToFP,
      SITOFP = Instruction::SIToFP,
      FPTRUNC = Instruction::FPTrunc,
      FPEXT = Instruction::FPExt,
      PTRTOINT = Instruction::PtrToInt,
      INTTOPTR = Instruction::IntToPtr,
      BITCAST = Instruction::BitCast,
      ICMPEQ, ICMPNE, ICMPUGT, ICMPUGE, ICMPULT, ICMPULE,
      ICMPSGT, ICMPSGE, ICMPSLT, ICMPSLE, FCMPOEQ,
      FCMPOGT, FCMPOGE, FCMPOLT, FCMPOLE, FCMPONE,
      FCMPORD, FCMPUNO, FCMPUEQ, FCMPUGT, FCMPUGE,
      FCMPULT, FCMPULE, FCMPUNE, EXTRACT, INSERT,
      SHUFFLE, SELECT, GEP, CALL, CONSTANT,
      INSERTVALUE, EXTRACTVALUE, EMPTY, TOMBSTONE };

    ExpressionOpcode opcode;
    const Type* type;
    SmallVector<uint32_t, 4> varargs;
    Value *function;

    Expression() { }
    Expression(ExpressionOpcode o) : opcode(o) { }

    bool operator==(const Expression &other) const {
      if (opcode != other.opcode)
        return false;
      else if (opcode == EMPTY || opcode == TOMBSTONE)
        return true;
      else if (type != other.type)
        return false;
      else if (function != other.function)
        return false;
      else {
        if (varargs.size() != other.varargs.size())
          return false;

        for (size_t i = 0; i < varargs.size(); ++i)
          if (varargs[i] != other.varargs[i])
            return false;

        return true;
      }
    }

    bool operator!=(const Expression &other) const {
      return !(*this == other);
    }
  };

  class ValueTable {
    private:
      DenseMap<Value*, uint32_t> valueNumbering;
      DenseMap<Expression, uint32_t> expressionNumbering;
      AliasAnalysis* AA;
      MemoryDependenceAnalysis* MD;
      DominatorTree* DT;

      uint32_t nextValueNumber;

      Expression::ExpressionOpcode getOpcode(CmpInst* C);
      Expression create_expression(BinaryOperator* BO);
      Expression create_expression(CmpInst* C);
      Expression create_expression(ShuffleVectorInst* V);
      Expression create_expression(ExtractElementInst* C);
      Expression create_expression(InsertElementInst* V);
      Expression create_expression(SelectInst* V);
      Expression create_expression(CastInst* C);
      Expression create_expression(GetElementPtrInst* G);
      Expression create_expression(CallInst* C);
      Expression create_expression(Constant* C);
      Expression create_expression(ExtractValueInst* C);
      Expression create_expression(InsertValueInst* C);

      uint32_t lookup_or_add_call(CallInst* C);
    public:
      ValueTable() : nextValueNumber(1) { }
      uint32_t lookup_or_add(Value *V);
      uint32_t lookup(Value *V) const;
      void add(Value *V, uint32_t num);
      void clear();
      void erase(Value *v);
      unsigned size();
      void setAliasAnalysis(AliasAnalysis* A) { AA = A; }
      AliasAnalysis *getAliasAnalysis() const { return AA; }
      void setMemDep(MemoryDependenceAnalysis* M) { MD = M; }
      void setDomTree(DominatorTree* D) { DT = D; }
      uint32_t getNextUnusedValueNumber() { return nextValueNumber; }
      void verifyRemoved(const Value *) const;
  };
}

namespace llvm {
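// Expression is used below as a DenseMap key, so it needs a DenseMapInfo
// specialization supplying empty/tombstone keys, a hash function, and
// equality.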
template <> struct DenseMapInfo<Expression> {
  static inline Expression getEmptyKey() {
    return Expression(Expression::EMPTY);
  }

  static inline Expression getTombstoneKey() {
    return Expression(Expression::TOMBSTONE);
  }

  static unsigned getHashValue(const Expression e) {
    unsigned hash = e.opcode;

    hash = ((unsigned)((uintptr_t)e.type >> 4) ^
            (unsigned)((uintptr_t)e.type >> 9));

    for (SmallVector<uint32_t, 4>::const_iterator I = e.varargs.begin(),
         E = e.varargs.end(); I != E; ++I)
      hash = *I + hash * 37;

    hash = ((unsigned)((uintptr_t)e.function >> 4) ^
            (unsigned)((uintptr_t)e.function >> 9)) +
           hash * 37;

    return hash;
  }
  static bool isEqual(const Expression &LHS, const Expression &RHS) {
    return LHS == RHS;
  }
};

template <>
struct isPodLike<Expression> { static const bool value = true; };

}

//===----------------------------------------------------------------------===//
//                     ValueTable Internal Functions
//===----------------------------------------------------------------------===//

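/// getOpcode - Translate the predicate of an integer or floating-point
/// comparison into the corresponding Expression opcode.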
Expression::ExpressionOpcode ValueTable::getOpcode(CmpInst* C) {
  if (isa<ICmpInst>(C)) {
    switch (C->getPredicate()) {
    default:  // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case ICmpInst::ICMP_EQ:  return Expression::ICMPEQ;
    case ICmpInst::ICMP_NE:  return Expression::ICMPNE;
    case ICmpInst::ICMP_UGT: return Expression::ICMPUGT;
    case ICmpInst::ICMP_UGE: return Expression::ICMPUGE;
    case ICmpInst::ICMP_ULT: return Expression::ICMPULT;
    case ICmpInst::ICMP_ULE: return Expression::ICMPULE;
    case ICmpInst::ICMP_SGT: return Expression::ICMPSGT;
    case ICmpInst::ICMP_SGE: return Expression::ICMPSGE;
    case ICmpInst::ICMP_SLT: return Expression::ICMPSLT;
    case ICmpInst::ICMP_SLE: return Expression::ICMPSLE;
    }
  } else {
    switch (C->getPredicate()) {
    default: // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case FCmpInst::FCMP_OEQ: return Expression::FCMPOEQ;
    case FCmpInst::FCMP_OGT: return Expression::FCMPOGT;
    case FCmpInst::FCMP_OGE: return Expression::FCMPOGE;
    case FCmpInst::FCMP_OLT: return Expression::FCMPOLT;
    case FCmpInst::FCMP_OLE: return Expression::FCMPOLE;
    case FCmpInst::FCMP_ONE: return Expression::FCMPONE;
    case FCmpInst::FCMP_ORD: return Expression::FCMPORD;
    case FCmpInst::FCMP_UNO: return Expression::FCMPUNO;
    case FCmpInst::FCMP_UEQ: return Expression::FCMPUEQ;
    case FCmpInst::FCMP_UGT: return Expression::FCMPUGT;
    case FCmpInst::FCMP_UGE: return Expression::FCMPUGE;
    case FCmpInst::FCMP_ULT: return Expression::FCMPULT;
    case FCmpInst::FCMP_ULE: return Expression::FCMPULE;
    case FCmpInst::FCMP_UNE: return Expression::FCMPUNE;
    }
  }
}

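/// create_expression - The overloads below build an Expression describing an
/// instruction in terms of its opcode, result type, and the value numbers of
/// its operands (obtained via lookup_or_add).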
Expression ValueTable::create_expression(CallInst* C) {
  Expression e;

  e.type = C->getType();
  e.function = C->getCalledFunction();
  e.opcode = Expression::CALL;

  for (CallInst::op_iterator I = C->op_begin()+1, E = C->op_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(BinaryOperator* BO) {
  Expression e;
  e.varargs.push_back(lookup_or_add(BO->getOperand(0)));
  e.varargs.push_back(lookup_or_add(BO->getOperand(1)));
  e.function = 0;
  e.type = BO->getType();
  e.opcode = static_cast<Expression::ExpressionOpcode>(BO->getOpcode());

  return e;
}

Expression ValueTable::create_expression(CmpInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.varargs.push_back(lookup_or_add(C->getOperand(1)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = getOpcode(C);

  return e;
}

Expression ValueTable::create_expression(CastInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = static_cast<Expression::ExpressionOpcode>(C->getOpcode());

  return e;
}

Expression ValueTable::create_expression(ShuffleVectorInst* S) {
  Expression e;

  e.varargs.push_back(lookup_or_add(S->getOperand(0)));
  e.varargs.push_back(lookup_or_add(S->getOperand(1)));
  e.varargs.push_back(lookup_or_add(S->getOperand(2)));
  e.function = 0;
  e.type = S->getType();
  e.opcode = Expression::SHUFFLE;

  return e;
}

Expression ValueTable::create_expression(ExtractElementInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getOperand(0)));
  e.varargs.push_back(lookup_or_add(E->getOperand(1)));
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACT;

  return e;
}

Expression ValueTable::create_expression(InsertElementInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getOperand(0)));
  e.varargs.push_back(lookup_or_add(I->getOperand(1)));
  e.varargs.push_back(lookup_or_add(I->getOperand(2)));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::INSERT;

  return e;
}

Expression ValueTable::create_expression(SelectInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getCondition()));
  e.varargs.push_back(lookup_or_add(I->getTrueValue()));
  e.varargs.push_back(lookup_or_add(I->getFalseValue()));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::SELECT;

  return e;
}

Expression ValueTable::create_expression(GetElementPtrInst* G) {
  Expression e;

  e.varargs.push_back(lookup_or_add(G->getPointerOperand()));
  e.function = 0;
  e.type = G->getType();
  e.opcode = Expression::GEP;

  for (GetElementPtrInst::op_iterator I = G->idx_begin(), E = G->idx_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(ExtractValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  for (ExtractValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACTVALUE;

  return e;
}

Expression ValueTable::create_expression(InsertValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  e.varargs.push_back(lookup_or_add(E->getInsertedValueOperand()));
  for (InsertValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::INSERTVALUE;

  return e;
}

//===----------------------------------------------------------------------===//
//                     ValueTable External Functions
//===----------------------------------------------------------------------===//

/// add - Insert a value into the table with a specified value number.
void ValueTable::add(Value *V, uint32_t num) {
  valueNumbering.insert(std::make_pair(V, num));
}

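/// lookup_or_add_call - Value-number a call instruction.  Calls that do not
/// access memory are numbered purely by their expression; read-only calls may
/// reuse the number of an identical dominating call found through memory
/// dependence analysis; all other calls receive a fresh value number.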
uint32_t ValueTable::lookup_or_add_call(CallInst* C) {
  if (AA->doesNotAccessMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) e = nextValueNumber++;
    valueNumbering[C] = e;
    return e;
  } else if (AA->onlyReadsMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }
    if (!MD) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }

    MemDepResult local_dep = MD->getDependency(C);

    if (!local_dep.isDef() && !local_dep.isNonLocal()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (local_dep.isDef()) {
      CallInst* local_cdep = cast<CallInst>(local_dep.getInst());

      if (local_cdep->getNumOperands() != C->getNumOperands()) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }

      for (unsigned i = 1; i < C->getNumOperands(); ++i) {
        uint32_t c_vn = lookup_or_add(C->getOperand(i));
        uint32_t cd_vn = lookup_or_add(local_cdep->getOperand(i));
        if (c_vn != cd_vn) {
          valueNumbering[C] = nextValueNumber;
          return nextValueNumber++;
        }
      }

      uint32_t v = lookup_or_add(local_cdep);
      valueNumbering[C] = v;
      return v;
    }

    // Non-local case.
    const MemoryDependenceAnalysis::NonLocalDepInfo &deps =
      MD->getNonLocalCallDependency(CallSite(C));
    // FIXME: call/call dependencies for readonly calls should return def, not
    // clobber!  Move the checking logic to MemDep!
    CallInst* cdep = 0;

    // Check to see if we have a single dominating call instruction that is
    // identical to C.
    for (unsigned i = 0, e = deps.size(); i != e; ++i) {
      const NonLocalDepEntry *I = &deps[i];
      // Ignore non-local dependencies.
      if (I->getResult().isNonLocal())
        continue;

      // We don't handle non-dependencies.  If we already have a call, reject
      // instruction dependencies.
      if (I->getResult().isClobber() || cdep != 0) {
        cdep = 0;
        break;
      }

      CallInst *NonLocalDepCall = dyn_cast<CallInst>(I->getResult().getInst());
      // FIXME: All duplicated with non-local case.
      if (NonLocalDepCall && DT->properlyDominates(I->getBB(), C->getParent())){
        cdep = NonLocalDepCall;
        continue;
      }

      cdep = 0;
      break;
    }

    if (!cdep) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (cdep->getNumOperands() != C->getNumOperands()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }
    for (unsigned i = 1; i < C->getNumOperands(); ++i) {
      uint32_t c_vn = lookup_or_add(C->getOperand(i));
      uint32_t cd_vn = lookup_or_add(cdep->getOperand(i));
      if (c_vn != cd_vn) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }
    }

    uint32_t v = lookup_or_add(cdep);
    valueNumbering[C] = v;
    return v;

  } else {
    valueNumbering[C] = nextValueNumber;
    return nextValueNumber++;
  }
}

/// lookup_or_add - Returns the value number for the specified value, assigning
/// it a new number if it did not have one before.
uint32_t ValueTable::lookup_or_add(Value *V) {
  DenseMap<Value*, uint32_t>::iterator VI = valueNumbering.find(V);
  if (VI != valueNumbering.end())
    return VI->second;

  if (!isa<Instruction>(V)) {
    valueNumbering[V] = nextValueNumber;
    return nextValueNumber++;
  }

  Instruction* I = cast<Instruction>(V);
  Expression exp;
  switch (I->getOpcode()) {
    case Instruction::Call:
      return lookup_or_add_call(cast<CallInst>(I));
    case Instruction::Add:
    case Instruction::FAdd:
    case Instruction::Sub:
    case Instruction::FSub:
    case Instruction::Mul:
    case Instruction::FMul:
    case Instruction::UDiv:
    case Instruction::SDiv:
    case Instruction::FDiv:
    case Instruction::URem:
    case Instruction::SRem:
    case Instruction::FRem:
    case Instruction::Shl:
    case Instruction::LShr:
    case Instruction::AShr:
    case Instruction::And:
    case Instruction::Or :
    case Instruction::Xor:
      exp = create_expression(cast<BinaryOperator>(I));
      break;
    case Instruction::ICmp:
    case Instruction::FCmp:
      exp = create_expression(cast<CmpInst>(I));
      break;
    case Instruction::Trunc:
    case Instruction::ZExt:
    case Instruction::SExt:
    case Instruction::FPToUI:
    case Instruction::FPToSI:
    case Instruction::UIToFP:
    case Instruction::SIToFP:
    case Instruction::FPTrunc:
    case Instruction::FPExt:
    case Instruction::PtrToInt:
    case Instruction::IntToPtr:
    case Instruction::BitCast:
      exp = create_expression(cast<CastInst>(I));
      break;
    case Instruction::Select:
      exp = create_expression(cast<SelectInst>(I));
      break;
    case Instruction::ExtractElement:
      exp = create_expression(cast<ExtractElementInst>(I));
      break;
    case Instruction::InsertElement:
      exp = create_expression(cast<InsertElementInst>(I));
      break;
    case Instruction::ShuffleVector:
      exp = create_expression(cast<ShuffleVectorInst>(I));
      break;
    case Instruction::ExtractValue:
      exp = create_expression(cast<ExtractValueInst>(I));
      break;
    case Instruction::InsertValue:
      exp = create_expression(cast<InsertValueInst>(I));
      break;
    case Instruction::GetElementPtr:
      exp = create_expression(cast<GetElementPtrInst>(I));
      break;
    default:
      valueNumbering[V] = nextValueNumber;
      return nextValueNumber++;
  }

  uint32_t& e = expressionNumbering[exp];
  if (!e) e = nextValueNumber++;
  valueNumbering[V] = e;
  return e;
}

/// lookup - Returns the value number of the specified value. Fails if
/// the value has not yet been numbered.
uint32_t ValueTable::lookup(Value *V) const {
  DenseMap<Value*, uint32_t>::const_iterator VI = valueNumbering.find(V);
  assert(VI != valueNumbering.end() && "Value not numbered?");
  return VI->second;
}

/// clear - Remove all entries from the ValueTable.
void ValueTable::clear() {
  valueNumbering.clear();
  expressionNumbering.clear();
  nextValueNumber = 1;
}

/// erase - Remove a value from the value numbering.
void ValueTable::erase(Value *V) {
  valueNumbering.erase(V);
}

/// verifyRemoved - Verify that the value is removed from all internal data
/// structures.
void ValueTable::verifyRemoved(const Value *V) const {
  for (DenseMap<Value*, uint32_t>::const_iterator
         I = valueNumbering.begin(), E = valueNumbering.end(); I != E; ++I) {
    assert(I->first != V && "Inst still occurs in value numbering map!");
  }
}

//===----------------------------------------------------------------------===//
//                                GVN Pass
//===----------------------------------------------------------------------===//

namespace {
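  /// ValueNumberScope - A per-block table mapping value numbers to the Value
  /// available in that block; 'parent' links this table to an enclosing scope
  /// so a lookup can search outward through the chain.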
  struct ValueNumberScope {
    ValueNumberScope* parent;
    DenseMap<uint32_t, Value*> table;

    ValueNumberScope(ValueNumberScope* p) : parent(p) { }
  };
}

namespace {

  class GVN : public FunctionPass {
    bool runOnFunction(Function &F);
  public:
    static char ID; // Pass identification, replacement for typeid
    explicit GVN(bool noloads = false)
      : FunctionPass(&ID), NoLoads(noloads), MD(0) { }

  private:
    bool NoLoads;
    MemoryDependenceAnalysis *MD;
    DominatorTree *DT;

    ValueTable VN;
    DenseMap<BasicBlock*, ValueNumberScope*> localAvail;

    // List of critical edges to be split between iterations.
    SmallVector<std::pair<TerminatorInst*, unsigned>, 4> toSplit;

    // This transformation requires dominator info.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<DominatorTree>();
      if (!NoLoads)
        AU.addRequired<MemoryDependenceAnalysis>();
      AU.addRequired<AliasAnalysis>();

      AU.addPreserved<DominatorTree>();
      AU.addPreserved<AliasAnalysis>();
    }

    // Helper functions
    // FIXME: eliminate or document these better
    bool processLoad(LoadInst* L,
                     SmallVectorImpl<Instruction*> &toErase);
    bool processInstruction(Instruction *I,
                            SmallVectorImpl<Instruction*> &toErase);
    bool processNonLocalLoad(LoadInst* L,
                             SmallVectorImpl<Instruction*> &toErase);
    bool processBlock(BasicBlock *BB);
    void dump(DenseMap<uint32_t, Value*>& d);
    bool iterateOnFunction(Function &F);
    Value *CollapsePhi(PHINode* p);
    bool performPRE(Function& F);
    Value *lookupNumber(BasicBlock *BB, uint32_t num);
    void cleanupGlobalSets();
    void verifyRemoved(const Instruction *I) const;
    bool splitCriticalEdges();
  };

  char GVN::ID = 0;
}

// createGVNPass - The public interface to this file...
FunctionPass *llvm::createGVNPass(bool NoLoads) {
  return new GVN(NoLoads);
}

static RegisterPass<GVN> X("gvn",
                           "Global Value Numbering");

void GVN::dump(DenseMap<uint32_t, Value*>& d) {
  errs() << "{\n";
  for (DenseMap<uint32_t, Value*>::iterator I = d.begin(),
       E = d.end(); I != E; ++I) {
    errs() << I->first << "\n";
    I->second->dump();
  }
  errs() << "}\n";
}

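/// isSafeReplacement - Return true if it is safe to replace the PHI node 'p'
/// with 'inst'.  The only case rejected is when 'inst' is itself a PHI and
/// 'p' has a PHI user in the same block as 'inst'.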
static bool isSafeReplacement(PHINode* p, Instruction *inst) {
  if (!isa<PHINode>(inst))
    return true;

  for (Instruction::use_iterator UI = p->use_begin(), E = p->use_end();
       UI != E; ++UI)
    if (PHINode* use_phi = dyn_cast<PHINode>(UI))
      if (use_phi->getParent() == inst->getParent())
        return false;

  return true;
}

Value *GVN::CollapsePhi(PHINode *PN) {
  Value *ConstVal = PN->hasConstantValue(DT);
  if (!ConstVal) return 0;

  Instruction *Inst = dyn_cast<Instruction>(ConstVal);
  if (!Inst)
    return ConstVal;

  if (DT->dominates(Inst, PN))
    if (isSafeReplacement(PN, Inst))
      return Inst;
  return 0;
}

/// IsValueFullyAvailableInBlock - Return true if we can prove that the value
/// we're analyzing is fully available in the specified block.  As we go, keep
/// track of which blocks we know are fully alive in FullyAvailableBlocks.
/// This map is actually a tri-state map with the following values:
///   0) we know the block *is not* fully available.
///   1) we know the block *is* fully available.
///   2) we do not know whether the block is fully available or not, but we are
///      currently speculating that it will be.
///   3) we are speculating for this block and have used that to speculate for
///      other blocks.
static bool IsValueFullyAvailableInBlock(BasicBlock *BB,
                            DenseMap<BasicBlock*, char> &FullyAvailableBlocks) {
  // Optimistically assume that the block is fully available and check to see
  // if we already know about this block in one lookup.
  std::pair<DenseMap<BasicBlock*, char>::iterator, char> IV =
    FullyAvailableBlocks.insert(std::make_pair(BB, 2));

  // If the entry already existed for this block, return the precomputed value.
  if (!IV.second) {
    // If this is a speculative "available" value, mark it as being used for
    // speculation of other blocks.
    if (IV.first->second == 2)
      IV.first->second = 3;
    return IV.first->second != 0;
  }

  // Otherwise, see if it is fully available in all predecessors.
  pred_iterator PI = pred_begin(BB), PE = pred_end(BB);

  // If this block has no predecessors, it isn't live-in here.
  if (PI == PE)
    goto SpeculationFailure;

  for (; PI != PE; ++PI)
    // If the value isn't fully available in one of our predecessors, then it
    // isn't fully available in this block either.  Undo our previous
    // optimistic assumption and bail out.
    if (!IsValueFullyAvailableInBlock(*PI, FullyAvailableBlocks))
      goto SpeculationFailure;

  return true;

// SpeculationFailure - If we get here, we found out that this is not, after
// all, a fully-available block.  We have a problem if we speculated on this
// and used the speculation to mark other blocks as available.
SpeculationFailure:
  char &BBVal = FullyAvailableBlocks[BB];

  // If we didn't speculate on this, just return with it set to false.
  if (BBVal == 2) {
    BBVal = 0;
    return false;
  }

  // If we did speculate on this value, we could have blocks set to 1 that are
  // incorrect.  Walk the (transitive) successors of this block and mark them
  // as 0 if set to one.
  SmallVector<BasicBlock*, 32> BBWorklist;
  BBWorklist.push_back(BB);

  do {
    BasicBlock *Entry = BBWorklist.pop_back_val();
    // Note that this sets blocks to 0 (unavailable) if they happen to not
    // already be in FullyAvailableBlocks.  This is safe.
    char &EntryVal = FullyAvailableBlocks[Entry];
    if (EntryVal == 0) continue;  // Already unavailable.

    // Mark as unavailable.
    EntryVal = 0;

    for (succ_iterator I = succ_begin(Entry), E = succ_end(Entry); I != E; ++I)
      BBWorklist.push_back(*I);
  } while (!BBWorklist.empty());

  return false;
}


/// CanCoerceMustAliasedValueToLoad - Return true if
/// CoerceAvailableValueToLoadType will succeed.
static bool CanCoerceMustAliasedValueToLoad(Value *StoredVal,
                                            const Type *LoadTy,
                                            const TargetData &TD) {
  // If the loaded or stored value is a first-class array or struct, don't try
  // to transform them.  We need to be able to bitcast to integer.
  if (LoadTy->isStructTy() || LoadTy->isArrayTy() ||
      StoredVal->getType()->isStructTy() ||
      StoredVal->getType()->isArrayTy())
    return false;

  // The store has to be at least as big as the load.
  if (TD.getTypeSizeInBits(StoredVal->getType()) <
        TD.getTypeSizeInBits(LoadTy))
    return false;

  return true;
}


/// CoerceAvailableValueToLoadType - If we saw a store of a value to memory,
/// and then a load from a must-aliased pointer of a different type, try to
/// coerce the stored value.  LoadedTy is the type of the load we want to
/// replace and InsertPt is the place to insert new instructions.
///
/// If we can't do it, return null.
static Value *CoerceAvailableValueToLoadType(Value *StoredVal,
                                             const Type *LoadedTy,
                                             Instruction *InsertPt,
                                             const TargetData &TD) {
  if (!CanCoerceMustAliasedValueToLoad(StoredVal, LoadedTy, TD))
    return 0;

  const Type *StoredValTy = StoredVal->getType();

  uint64_t StoreSize = TD.getTypeSizeInBits(StoredValTy);
  uint64_t LoadSize = TD.getTypeSizeInBits(LoadedTy);

  // If the store and reload are the same size, we can always reuse it.
  if (StoreSize == LoadSize) {
    if (StoredValTy->isPointerTy() && LoadedTy->isPointerTy()) {
      // Pointer to Pointer -> use bitcast.
      return new BitCastInst(StoredVal, LoadedTy, "", InsertPt);
    }

    // Convert source pointers to integers, which can be bitcast.
    if (StoredValTy->isPointerTy()) {
      StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
      StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
    }

    const Type *TypeToCastTo = LoadedTy;
    if (TypeToCastTo->isPointerTy())
      TypeToCastTo = TD.getIntPtrType(StoredValTy->getContext());

    if (StoredValTy != TypeToCastTo)
      StoredVal = new BitCastInst(StoredVal, TypeToCastTo, "", InsertPt);

    // Cast to pointer if the load needs a pointer type.
    if (LoadedTy->isPointerTy())
      StoredVal = new IntToPtrInst(StoredVal, LoadedTy, "", InsertPt);

    return StoredVal;
  }

  // If the loaded value is smaller than the available value, then we can
  // extract out a piece from it.  If the available value is too small, then we
  // can't do anything.
  assert(StoreSize >= LoadSize && "CanCoerceMustAliasedValueToLoad fail");

  // Convert source pointers to integers, which can be manipulated.
  if (StoredValTy->isPointerTy()) {
    StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
    StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
  }

  // Convert vectors and fp to integer, which can be manipulated.
  if (!StoredValTy->isIntegerTy()) {
    StoredValTy = IntegerType::get(StoredValTy->getContext(), StoreSize);
    StoredVal = new BitCastInst(StoredVal, StoredValTy, "", InsertPt);
  }

  // If this is a big-endian system, we need to shift the value down to the low
  // bits so that a truncate will work.
  if (TD.isBigEndian()) {
    Constant *Val = ConstantInt::get(StoredVal->getType(), StoreSize-LoadSize);
    StoredVal = BinaryOperator::CreateLShr(StoredVal, Val, "tmp", InsertPt);
  }

  // Truncate the integer to the right size now.
  const Type *NewIntTy = IntegerType::get(StoredValTy->getContext(), LoadSize);
  StoredVal = new TruncInst(StoredVal, NewIntTy, "trunc", InsertPt);

  if (LoadedTy == NewIntTy)
    return StoredVal;

  // If the result is a pointer, inttoptr.
  if (LoadedTy->isPointerTy())
    return new IntToPtrInst(StoredVal, LoadedTy, "inttoptr", InsertPt);

  // Otherwise, bitcast.
  return new BitCastInst(StoredVal, LoadedTy, "bitcast", InsertPt);
}

/// GetBaseWithConstantOffset - Analyze the specified pointer to see if it can
/// be expressed as a base pointer plus a constant offset.  Return the base and
/// offset to the caller.
static Value *GetBaseWithConstantOffset(Value *Ptr, int64_t &Offset,
                                        const TargetData &TD) {
  Operator *PtrOp = dyn_cast<Operator>(Ptr);
  if (PtrOp == 0) return Ptr;

  // Just look through bitcasts.
  if (PtrOp->getOpcode() == Instruction::BitCast)
    return GetBaseWithConstantOffset(PtrOp->getOperand(0), Offset, TD);

  // If this is a GEP with constant indices, we can look through it.
  GEPOperator *GEP = dyn_cast<GEPOperator>(PtrOp);
  if (GEP == 0 || !GEP->hasAllConstantIndices()) return Ptr;

  gep_type_iterator GTI = gep_type_begin(GEP);
  for (User::op_iterator I = GEP->idx_begin(), E = GEP->idx_end(); I != E;
       ++I, ++GTI) {
    ConstantInt *OpC = cast<ConstantInt>(*I);
    if (OpC->isZero()) continue;

    // Handle struct and array indices which add their offset to the pointer.
    if (const StructType *STy = dyn_cast<StructType>(*GTI)) {
      Offset += TD.getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
    } else {
      uint64_t Size = TD.getTypeAllocSize(GTI.getIndexedType());
      Offset += OpC->getSExtValue()*Size;
    }
  }

  // Re-sign extend from the pointer size if needed to get overflow edge cases
  // right.
  unsigned PtrSize = TD.getPointerSizeInBits();
  if (PtrSize < 64)
    Offset = (Offset << (64-PtrSize)) >> (64-PtrSize);

  return GetBaseWithConstantOffset(GEP->getPointerOperand(), Offset, TD);
}


/// AnalyzeLoadFromClobberingWrite - This function is called when we have a
/// memdep query of a load that ends up being a clobbering memory write (store,
/// memset, memcpy, memmove).  This means that the write *may* provide bits
/// used by the load but we can't be sure because the pointers don't mustalias.
///
/// Check this case to see if there is anything more we can do before we give
/// up.  This returns -1 if we have to give up, or a byte number in the stored
/// value of the piece that feeds the load.
static int AnalyzeLoadFromClobberingWrite(const Type *LoadTy, Value *LoadPtr,
                                          Value *WritePtr,
                                          uint64_t WriteSizeInBits,
                                          const TargetData &TD) {
  // If the loaded or stored value is a first-class array or struct, don't try
  // to transform them.  We need to be able to bitcast to integer.
  if (LoadTy->isStructTy() || LoadTy->isArrayTy())
    return -1;

  int64_t StoreOffset = 0, LoadOffset = 0;
  Value *StoreBase = GetBaseWithConstantOffset(WritePtr, StoreOffset, TD);
  Value *LoadBase =
    GetBaseWithConstantOffset(LoadPtr, LoadOffset, TD);
  if (StoreBase != LoadBase)
    return -1;

  // If the load and store are to the exact same address, they should have been
  // a must alias.  AA must have gotten confused.
  // FIXME: Study to see if/when this happens.  One case is forwarding a memset
  // to a load from the base of the memset.
#if 0
  if (LoadOffset == StoreOffset) {
    dbgs() << "STORE/LOAD DEP WITH COMMON POINTER MISSED:\n"
           << "Base       = " << *StoreBase << "\n"
           << "Store Ptr  = " << *WritePtr << "\n"
           << "Store Offs = " << StoreOffset << "\n"
           << "Load Ptr   = " << *LoadPtr << "\n";
    abort();
  }
#endif

  // If the load and store don't overlap at all, the store doesn't provide
  // anything to the load.  In this case, they really don't alias at all, AA
  // must have gotten confused.
  // FIXME: Investigate cases where this bails out, e.g. rdar://7238614. Then
  // remove this check, as it is duplicated with what we have below.
  uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy);

  if ((WriteSizeInBits & 7) | (LoadSize & 7))
    return -1;
  uint64_t StoreSize = WriteSizeInBits >> 3;  // Convert to bytes.
  LoadSize >>= 3;


  bool isAAFailure = false;
  if (StoreOffset < LoadOffset)
    isAAFailure = StoreOffset+int64_t(StoreSize) <= LoadOffset;
  else
    isAAFailure = LoadOffset+int64_t(LoadSize) <= StoreOffset;

  if (isAAFailure) {
#if 0
    dbgs() << "STORE LOAD DEP WITH COMMON BASE:\n"
           << "Base       = " << *StoreBase << "\n"
           << "Store Ptr  = " << *WritePtr << "\n"
           << "Store Offs = " << StoreOffset << "\n"
           << "Load Ptr   = " << *LoadPtr << "\n";
    abort();
#endif
    return -1;
  }

  // If the Load isn't completely contained within the stored bits, we don't
  // have all the bits to feed it.  We could do something crazy in the future
  // (issue a smaller load then merge the bits in) but this seems unlikely to
  // be valuable.
  if (StoreOffset > LoadOffset ||
      StoreOffset+StoreSize < LoadOffset+LoadSize)
    return -1;

  // Okay, we can do this transformation.  Return the number of bytes into the
  // store that the load is.
  return LoadOffset-StoreOffset;
}

/// AnalyzeLoadFromClobberingStore - This function is called when we have a
/// memdep query of a load that ends up being a clobbering store.
static int AnalyzeLoadFromClobberingStore(const Type *LoadTy, Value *LoadPtr,
                                          StoreInst *DepSI,
                                          const TargetData &TD) {
  // Cannot handle reading from store of first-class aggregate yet.
  if (DepSI->getOperand(0)->getType()->isStructTy() ||
      DepSI->getOperand(0)->getType()->isArrayTy())
    return -1;

  Value *StorePtr = DepSI->getPointerOperand();
  uint64_t StoreSize = TD.getTypeSizeInBits(DepSI->getOperand(0)->getType());
  return AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr,
                                        StorePtr, StoreSize, TD);
}

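/// AnalyzeLoadFromClobberingMemInst - This function is called when we have a
/// memdep query of a load that ends up being clobbered by a memory intrinsic.
/// It handles memset, and memcpy/memmove from constant memory; it returns the
/// byte offset into the written bytes that would feed the load, or -1 if the
/// load cannot be satisfied this way.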
Chris Lattner07df9ef2009-12-09 07:37:07 +00001080static int AnalyzeLoadFromClobberingMemInst(const Type *LoadTy, Value *LoadPtr,
1081 MemIntrinsic *MI,
Chris Lattner42376062009-12-06 01:57:02 +00001082 const TargetData &TD) {
1083 // If the mem operation is a non-constant size, we can't handle it.
1084 ConstantInt *SizeCst = dyn_cast<ConstantInt>(MI->getLength());
1085 if (SizeCst == 0) return -1;
1086 uint64_t MemSizeInBits = SizeCst->getZExtValue()*8;
Chris Lattner778cb922009-12-06 05:29:56 +00001087
1088 // If this is memset, we just need to see if the offset is valid in the size
1089 // of the memset..
Chris Lattner42376062009-12-06 01:57:02 +00001090 if (MI->getIntrinsicID() == Intrinsic::memset)
Chris Lattner07df9ef2009-12-09 07:37:07 +00001091 return AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr, MI->getDest(),
1092 MemSizeInBits, TD);
Chris Lattner42376062009-12-06 01:57:02 +00001093
Chris Lattner778cb922009-12-06 05:29:56 +00001094 // If we have a memcpy/memmove, the only case we can handle is if this is a
1095 // copy from constant memory. In that case, we can read directly from the
1096 // constant memory.
1097 MemTransferInst *MTI = cast<MemTransferInst>(MI);
1098
1099 Constant *Src = dyn_cast<Constant>(MTI->getSource());
1100 if (Src == 0) return -1;
1101
1102 GlobalVariable *GV = dyn_cast<GlobalVariable>(Src->getUnderlyingObject());
1103 if (GV == 0 || !GV->isConstant()) return -1;
1104
1105 // See if the access is within the bounds of the transfer.
Chris Lattner07df9ef2009-12-09 07:37:07 +00001106 int Offset = AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr,
1107 MI->getDest(), MemSizeInBits, TD);
Chris Lattner778cb922009-12-06 05:29:56 +00001108 if (Offset == -1)
1109 return Offset;
1110
1111 // Otherwise, see if we can constant fold a load from the constant with the
1112 // offset applied as appropriate.
1113 Src = ConstantExpr::getBitCast(Src,
1114 llvm::Type::getInt8PtrTy(Src->getContext()));
1115 Constant *OffsetCst =
1116 ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
1117 Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
Chris Lattner07df9ef2009-12-09 07:37:07 +00001118 Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(LoadTy));
Chris Lattner778cb922009-12-06 05:29:56 +00001119 if (ConstantFoldLoadFromConstPtr(Src, &TD))
1120 return Offset;
Chris Lattner42376062009-12-06 01:57:02 +00001121 return -1;
1122}
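// Illustrative case (hypothetical IR names): a memcpy whose source is a
// bitcast of a constant global @G, followed by a clobbered i32 load 4 bytes
// into the copied region, is answered by constant-folding a load of @G at
// byte offset 4 using the bitcast/getelementptr sequence built above.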
1123
Chris Lattnerd28f9082009-09-21 06:24:16 +00001124
1125/// GetStoreValueForLoad - This function is called when we have a
1126/// memdep query of a load that ends up being a clobbering store. This means
1127/// that the store *may* provide bits used by the load but we can't be sure
1128/// because the pointers don't mustalias. Check this case to see if there is
1129/// anything more we can do before we give up.
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001130static Value *GetStoreValueForLoad(Value *SrcVal, unsigned Offset,
1131 const Type *LoadTy,
1132 Instruction *InsertPt, const TargetData &TD){
Chris Lattnerd28f9082009-09-21 06:24:16 +00001133 LLVMContext &Ctx = SrcVal->getType()->getContext();
1134
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001135 uint64_t StoreSize = TD.getTypeSizeInBits(SrcVal->getType())/8;
1136 uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy)/8;
Chris Lattnerd28f9082009-09-21 06:24:16 +00001137
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001138 IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
Chris Lattnerd28f9082009-09-21 06:24:16 +00001139
1140 // Compute which bits of the stored value are being used by the load. Convert
1141 // to an integer type to start with.
Duncan Sands19d0b472010-02-16 11:11:14 +00001142 if (SrcVal->getType()->isPointerTy())
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001143 SrcVal = Builder.CreatePtrToInt(SrcVal, TD.getIntPtrType(Ctx), "tmp");
Duncan Sands19d0b472010-02-16 11:11:14 +00001144 if (!SrcVal->getType()->isIntegerTy())
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001145 SrcVal = Builder.CreateBitCast(SrcVal, IntegerType::get(Ctx, StoreSize*8),
1146 "tmp");
Chris Lattnerd28f9082009-09-21 06:24:16 +00001147
 1148 // Shift the bits to the least significant position, depending on endianness.
1149 unsigned ShiftAmt;
Chris Lattner42376062009-12-06 01:57:02 +00001150 if (TD.isLittleEndian())
Chris Lattnerd28f9082009-09-21 06:24:16 +00001151 ShiftAmt = Offset*8;
Chris Lattner42376062009-12-06 01:57:02 +00001152 else
Chris Lattner24705382009-09-21 17:55:47 +00001153 ShiftAmt = (StoreSize-LoadSize-Offset)*8;
Chris Lattnerd28f9082009-09-21 06:24:16 +00001154
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001155 if (ShiftAmt)
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001156 SrcVal = Builder.CreateLShr(SrcVal, ShiftAmt, "tmp");
Chris Lattnerd28f9082009-09-21 06:24:16 +00001157
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001158 if (LoadSize != StoreSize)
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001159 SrcVal = Builder.CreateTrunc(SrcVal, IntegerType::get(Ctx, LoadSize*8),
1160 "tmp");
Chris Lattnerd28f9082009-09-21 06:24:16 +00001161
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001162 return CoerceAvailableValueToLoadType(SrcVal, LoadTy, InsertPt, TD);
Chris Lattnerd28f9082009-09-21 06:24:16 +00001163}
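// Illustrative case (assumed sizes): on a little-endian target, an i64 store
// feeding an i16 load at Offset == 2 gives ShiftAmt == 16, so the code above
// emits an lshr by 16 followed by a trunc to i16, extracting bytes 2 and 3 of
// the stored value; on a big-endian target the shift would instead be
// (8 - 2 - 2) * 8 == 32 bits.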
1164
Chris Lattner42376062009-12-06 01:57:02 +00001165/// GetMemInstValueForLoad - This function is called when we have a
1166/// memdep query of a load that ends up being a clobbering mem intrinsic.
1167static Value *GetMemInstValueForLoad(MemIntrinsic *SrcInst, unsigned Offset,
1168 const Type *LoadTy, Instruction *InsertPt,
1169 const TargetData &TD){
1170 LLVMContext &Ctx = LoadTy->getContext();
1171 uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy)/8;
1172
1173 IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
1174
1175 // We know that this method is only called when the mem transfer fully
1176 // provides the bits for the load.
1177 if (MemSetInst *MSI = dyn_cast<MemSetInst>(SrcInst)) {
1178 // memset(P, 'x', 1234) -> splat('x'), even if x is a variable, and
1179 // independently of what the offset is.
1180 Value *Val = MSI->getValue();
1181 if (LoadSize != 1)
1182 Val = Builder.CreateZExt(Val, IntegerType::get(Ctx, LoadSize*8));
1183
1184 Value *OneElt = Val;
1185
1186 // Splat the value out to the right number of bits.
1187 for (unsigned NumBytesSet = 1; NumBytesSet != LoadSize; ) {
1188 // If we can double the number of bytes set, do it.
1189 if (NumBytesSet*2 <= LoadSize) {
1190 Value *ShVal = Builder.CreateShl(Val, NumBytesSet*8);
1191 Val = Builder.CreateOr(Val, ShVal);
1192 NumBytesSet <<= 1;
1193 continue;
1194 }
1195
1196 // Otherwise insert one byte at a time.
1197 Value *ShVal = Builder.CreateShl(Val, 1*8);
1198 Val = Builder.CreateOr(OneElt, ShVal);
1199 ++NumBytesSet;
1200 }
1201
1202 return CoerceAvailableValueToLoadType(Val, LoadTy, InsertPt, TD);
1203 }
Chris Lattner778cb922009-12-06 05:29:56 +00001204
1205 // Otherwise, this is a memcpy/memmove from a constant global.
1206 MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);
1207 Constant *Src = cast<Constant>(MTI->getSource());
1208
1209 // Otherwise, see if we can constant fold a load from the constant with the
1210 // offset applied as appropriate.
1211 Src = ConstantExpr::getBitCast(Src,
1212 llvm::Type::getInt8PtrTy(Src->getContext()));
1213 Constant *OffsetCst =
1214 ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
1215 Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
1216 Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(LoadTy));
1217 return ConstantFoldLoadFromConstPtr(Src, &TD);
Chris Lattner42376062009-12-06 01:57:02 +00001218}
1219
Dan Gohmanb29cda92010-04-15 17:08:50 +00001220namespace {
Chris Lattner42376062009-12-06 01:57:02 +00001221
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001222struct AvailableValueInBlock {
1223 /// BB - The basic block in question.
1224 BasicBlock *BB;
Chris Lattner93236ba2009-12-06 04:54:31 +00001225 enum ValType {
1226 SimpleVal, // A simple offsetted value that is accessed.
1227 MemIntrin // A memory intrinsic which is loaded from.
1228 };
1229
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001230 /// V - The value that is live out of the block.
Chris Lattner93236ba2009-12-06 04:54:31 +00001231 PointerIntPair<Value *, 1, ValType> Val;
1232
1233 /// Offset - The byte offset in Val that is interesting for the load query.
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001234 unsigned Offset;
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001235
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001236 static AvailableValueInBlock get(BasicBlock *BB, Value *V,
1237 unsigned Offset = 0) {
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001238 AvailableValueInBlock Res;
1239 Res.BB = BB;
Chris Lattner93236ba2009-12-06 04:54:31 +00001240 Res.Val.setPointer(V);
1241 Res.Val.setInt(SimpleVal);
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001242 Res.Offset = Offset;
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001243 return Res;
1244 }
Chris Lattner93236ba2009-12-06 04:54:31 +00001245
1246 static AvailableValueInBlock getMI(BasicBlock *BB, MemIntrinsic *MI,
1247 unsigned Offset = 0) {
1248 AvailableValueInBlock Res;
1249 Res.BB = BB;
1250 Res.Val.setPointer(MI);
1251 Res.Val.setInt(MemIntrin);
1252 Res.Offset = Offset;
1253 return Res;
1254 }
1255
1256 bool isSimpleValue() const { return Val.getInt() == SimpleVal; }
1257 Value *getSimpleValue() const {
1258 assert(isSimpleValue() && "Wrong accessor");
1259 return Val.getPointer();
1260 }
1261
1262 MemIntrinsic *getMemIntrinValue() const {
1263 assert(!isSimpleValue() && "Wrong accessor");
1264 return cast<MemIntrinsic>(Val.getPointer());
1265 }
Chris Lattner927b0ac2009-12-21 23:04:33 +00001266
1267 /// MaterializeAdjustedValue - Emit code into this block to adjust the value
1268 /// defined here to the specified type. This handles various coercion cases.
1269 Value *MaterializeAdjustedValue(const Type *LoadTy,
1270 const TargetData *TD) const {
1271 Value *Res;
1272 if (isSimpleValue()) {
1273 Res = getSimpleValue();
1274 if (Res->getType() != LoadTy) {
1275 assert(TD && "Need target data to handle type mismatch case");
1276 Res = GetStoreValueForLoad(Res, Offset, LoadTy, BB->getTerminator(),
1277 *TD);
1278
1279 DEBUG(errs() << "GVN COERCED NONLOCAL VAL:\nOffset: " << Offset << " "
1280 << *getSimpleValue() << '\n'
1281 << *Res << '\n' << "\n\n\n");
1282 }
1283 } else {
1284 Res = GetMemInstValueForLoad(getMemIntrinValue(), Offset,
1285 LoadTy, BB->getTerminator(), *TD);
1286 DEBUG(errs() << "GVN COERCED NONLOCAL MEM INTRIN:\nOffset: " << Offset
1287 << " " << *getMemIntrinValue() << '\n'
1288 << *Res << '\n' << "\n\n\n");
1289 }
1290 return Res;
1291 }
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001292};
1293
Dan Gohmanb29cda92010-04-15 17:08:50 +00001294}
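// Note on usage (matching the code below): a clobbering store is recorded as
// AvailableValueInBlock::get(DepBB, DepSI->getOperand(0), Offset), a
// clobbering memset/memcpy as AvailableValueInBlock::getMI(DepBB, DepMI,
// Offset), and MaterializeAdjustedValue later coerces either form to the
// load's type.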
1295
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001296/// ConstructSSAForLoadSet - Given a set of loads specified by ValuesPerBlock,
1297/// construct SSA form, allowing us to eliminate LI. This returns the value
1298/// that should be used at LI's definition site.
1299static Value *ConstructSSAForLoadSet(LoadInst *LI,
1300 SmallVectorImpl<AvailableValueInBlock> &ValuesPerBlock,
1301 const TargetData *TD,
Chris Lattnerbf200182009-12-21 23:15:48 +00001302 const DominatorTree &DT,
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001303 AliasAnalysis *AA) {
Chris Lattnerbf200182009-12-21 23:15:48 +00001304 // Check for the fully redundant, dominating load case. In this case, we can
1305 // just use the dominating value directly.
1306 if (ValuesPerBlock.size() == 1 &&
1307 DT.properlyDominates(ValuesPerBlock[0].BB, LI->getParent()))
1308 return ValuesPerBlock[0].MaterializeAdjustedValue(LI->getType(), TD);
1309
1310 // Otherwise, we have to construct SSA form.
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001311 SmallVector<PHINode*, 8> NewPHIs;
1312 SSAUpdater SSAUpdate(&NewPHIs);
1313 SSAUpdate.Initialize(LI);
1314
1315 const Type *LoadTy = LI->getType();
1316
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001317 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
Chris Lattner93236ba2009-12-06 04:54:31 +00001318 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1319 BasicBlock *BB = AV.BB;
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001320
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001321 if (SSAUpdate.HasValueForBlock(BB))
1322 continue;
Chris Lattner93236ba2009-12-06 04:54:31 +00001323
Chris Lattner927b0ac2009-12-21 23:04:33 +00001324 SSAUpdate.AddAvailableValue(BB, AV.MaterializeAdjustedValue(LoadTy, TD));
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001325 }
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001326
1327 // Perform PHI construction.
1328 Value *V = SSAUpdate.GetValueInMiddleOfBlock(LI->getParent());
1329
1330 // If new PHI nodes were created, notify alias analysis.
Duncan Sands19d0b472010-02-16 11:11:14 +00001331 if (V->getType()->isPointerTy())
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001332 for (unsigned i = 0, e = NewPHIs.size(); i != e; ++i)
1333 AA->copyValue(LI, NewPHIs[i]);
1334
1335 return V;
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001336}
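// Illustrative result (hypothetical IR names): if the load in %merge sees the
// stored value %v1 out of %bb1 and an earlier load %v2 out of %bb2, the
// SSAUpdater above places "%phi = phi i32 [ %v1, %bb1 ], [ %v2, %bb2 ]" in
// %merge and returns that PHI as the load's replacement; the fully dominating
// single-value case at the top of the function skips the PHI entirely.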
1337
Gabor Greifce6dd882010-04-09 10:57:00 +00001338static bool isLifetimeStart(const Instruction *Inst) {
1339 if (const IntrinsicInst* II = dyn_cast<IntrinsicInst>(Inst))
Owen Andersonb9878ee2009-12-02 07:35:19 +00001340 return II->getIntrinsicID() == Intrinsic::lifetime_start;
Chris Lattnerc4680252009-12-02 06:44:58 +00001341 return false;
1342}
1343
Owen Anderson221a4362007-08-16 22:02:55 +00001344/// processNonLocalLoad - Attempt to eliminate a load whose dependencies are
1345/// non-local by performing PHI construction.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001346bool GVN::processNonLocalLoad(LoadInst *LI,
Chris Lattner804209d2008-03-21 22:01:16 +00001347 SmallVectorImpl<Instruction*> &toErase) {
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001348 // Find the non-local dependencies of the load.
Chris Lattner9b7d99e2009-12-22 04:25:02 +00001349 SmallVector<NonLocalDepResult, 64> Deps;
Chris Lattnerb6fc4b82008-12-09 19:25:07 +00001350 MD->getNonLocalPointerDependency(LI->getOperand(0), true, LI->getParent(),
1351 Deps);
David Greene2e6efc42010-01-05 01:27:17 +00001352 //DEBUG(dbgs() << "INVESTIGATING NONLOCAL LOAD: "
Dan Gohmanef3ef7f2009-07-31 20:24:18 +00001353 // << Deps.size() << *LI << '\n');
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001354
Owen Andersonb39e0de2008-08-26 22:07:42 +00001355 // If we had to process more than one hundred blocks to find the
1356 // dependencies, this load isn't worth worrying about. Optimizing
1357 // it will be too expensive.
Chris Lattnerb6fc4b82008-12-09 19:25:07 +00001358 if (Deps.size() > 100)
Owen Andersonb39e0de2008-08-26 22:07:42 +00001359 return false;
Chris Lattnerb6372932008-12-18 00:51:32 +00001360
1361 // If we had a phi translation failure, we'll have a single entry which is a
1362 // clobber in the current block. Reject this early.
Chris Lattner0c315472009-12-09 07:08:01 +00001363 if (Deps.size() == 1 && Deps[0].getResult().isClobber()) {
Torok Edwinba93ea72009-06-17 18:48:18 +00001364 DEBUG(
David Greene2e6efc42010-01-05 01:27:17 +00001365 dbgs() << "GVN: non-local load ";
1366 WriteAsOperand(dbgs(), LI);
1367 dbgs() << " is clobbered by " << *Deps[0].getResult().getInst() << '\n';
Torok Edwinba93ea72009-06-17 18:48:18 +00001368 );
Chris Lattnerb6372932008-12-18 00:51:32 +00001369 return false;
Torok Edwinba93ea72009-06-17 18:48:18 +00001370 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001371
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001372 // Filter out useless results (non-locals, etc). Keep track of the blocks
1373 // where we have a value available in repl, also keep track of whether we see
1374 // dependencies that produce an unknown value for the load (such as a call
1375 // that could potentially clobber the load).
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001376 SmallVector<AvailableValueInBlock, 16> ValuesPerBlock;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001377 SmallVector<BasicBlock*, 16> UnavailableBlocks;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001378
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001379 const TargetData *TD = 0;
1380
Chris Lattnerb6fc4b82008-12-09 19:25:07 +00001381 for (unsigned i = 0, e = Deps.size(); i != e; ++i) {
Chris Lattner0c315472009-12-09 07:08:01 +00001382 BasicBlock *DepBB = Deps[i].getBB();
1383 MemDepResult DepInfo = Deps[i].getResult();
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001384
Chris Lattner0e3d6332008-12-05 21:04:20 +00001385 if (DepInfo.isClobber()) {
Chris Lattnerca5f9cb2009-12-09 18:21:46 +00001386 // The address being loaded in this non-local block may not be the same as
1387 // the pointer operand of the load if PHI translation occurs. Make sure
1388 // to consider the right address.
1389 Value *Address = Deps[i].getAddress();
1390
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001391 // If the dependence is to a store that writes to a superset of the bits
1392 // read by the load, we can extract the bits we need for the load from the
1393 // stored value.
1394 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInfo.getInst())) {
1395 if (TD == 0)
1396 TD = getAnalysisIfAvailable<TargetData>();
Chris Lattnerca5f9cb2009-12-09 18:21:46 +00001397 if (TD && Address) {
1398 int Offset = AnalyzeLoadFromClobberingStore(LI->getType(), Address,
Chris Lattner07df9ef2009-12-09 07:37:07 +00001399 DepSI, *TD);
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001400 if (Offset != -1) {
1401 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1402 DepSI->getOperand(0),
1403 Offset));
1404 continue;
1405 }
1406 }
1407 }
Chris Lattner42376062009-12-06 01:57:02 +00001408
Chris Lattner42376062009-12-06 01:57:02 +00001409 // If the clobbering value is a memset/memcpy/memmove, see if we can
1410 // forward a value on from it.
Chris Lattner93236ba2009-12-06 04:54:31 +00001411 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(DepInfo.getInst())) {
Chris Lattner42376062009-12-06 01:57:02 +00001412 if (TD == 0)
1413 TD = getAnalysisIfAvailable<TargetData>();
Chris Lattnerca5f9cb2009-12-09 18:21:46 +00001414 if (TD && Address) {
1415 int Offset = AnalyzeLoadFromClobberingMemInst(LI->getType(), Address,
Chris Lattner07df9ef2009-12-09 07:37:07 +00001416 DepMI, *TD);
Chris Lattner93236ba2009-12-06 04:54:31 +00001417 if (Offset != -1) {
1418 ValuesPerBlock.push_back(AvailableValueInBlock::getMI(DepBB, DepMI,
1419 Offset));
1420 continue;
1421 }
Chris Lattner42376062009-12-06 01:57:02 +00001422 }
1423 }
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001424
Chris Lattner0e3d6332008-12-05 21:04:20 +00001425 UnavailableBlocks.push_back(DepBB);
1426 continue;
1427 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001428
Chris Lattner0e3d6332008-12-05 21:04:20 +00001429 Instruction *DepInst = DepInfo.getInst();
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001430
Chris Lattner0e3d6332008-12-05 21:04:20 +00001431 // Loading the allocation -> undef.
Chris Lattnerc4680252009-12-02 06:44:58 +00001432 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst) ||
Owen Andersonb9878ee2009-12-02 07:35:19 +00001433 // Loading immediately after lifetime begin -> undef.
1434 isLifetimeStart(DepInst)) {
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001435 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1436 UndefValue::get(LI->getType())));
Chris Lattner7e61daf2008-12-01 01:15:42 +00001437 continue;
1438 }
Owen Anderson2b2bd282009-10-28 07:05:35 +00001439
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001440 if (StoreInst *S = dyn_cast<StoreInst>(DepInst)) {
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001441 // Reject loads and stores that are to the same address but are of
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001442 // different types if we have to.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001443 if (S->getOperand(0)->getType() != LI->getType()) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001444 if (TD == 0)
1445 TD = getAnalysisIfAvailable<TargetData>();
1446
1447 // If the stored value is larger or equal to the loaded value, we can
1448 // reuse it.
Chris Lattner9045f232009-09-21 17:24:04 +00001449 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(S->getOperand(0),
1450 LI->getType(), *TD)) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001451 UnavailableBlocks.push_back(DepBB);
1452 continue;
1453 }
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001454 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001455
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001456 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1457 S->getOperand(0)));
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001458 continue;
1459 }
1460
1461 if (LoadInst *LD = dyn_cast<LoadInst>(DepInst)) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001462 // If the types mismatch and we can't handle it, reject reuse of the load.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001463 if (LD->getType() != LI->getType()) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001464 if (TD == 0)
1465 TD = getAnalysisIfAvailable<TargetData>();
1466
1467 // If the stored value is larger or equal to the loaded value, we can
1468 // reuse it.
Chris Lattner9045f232009-09-21 17:24:04 +00001469 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(LD, LI->getType(),*TD)){
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001470 UnavailableBlocks.push_back(DepBB);
1471 continue;
1472 }
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001473 }
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001474 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB, LD));
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001475 continue;
Owen Anderson5e5599b2007-07-25 19:57:03 +00001476 }
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001477
1478 UnavailableBlocks.push_back(DepBB);
1479 continue;
Chris Lattner2876a642008-03-21 21:14:38 +00001480 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001481
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001482 // If we have no predecessors that produce a known value for this load, exit
1483 // early.
1484 if (ValuesPerBlock.empty()) return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001485
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001486 // If all of the instructions we depend on produce a known value for this
1487 // load, then it is fully redundant and we can use PHI insertion to compute
1488 // its value. Insert PHIs and remove the fully redundant value now.
1489 if (UnavailableBlocks.empty()) {
David Greene2e6efc42010-01-05 01:27:17 +00001490 DEBUG(dbgs() << "GVN REMOVING NONLOCAL LOAD: " << *LI << '\n');
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001491
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001492 // Perform PHI construction.
Chris Lattnerbf200182009-12-21 23:15:48 +00001493 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD, *DT,
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001494 VN.getAliasAnalysis());
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001495 LI->replaceAllUsesWith(V);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001496
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001497 if (isa<PHINode>(V))
1498 V->takeName(LI);
Duncan Sands19d0b472010-02-16 11:11:14 +00001499 if (V->getType()->isPointerTy())
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001500 MD->invalidateCachedPointerInfo(V);
Bob Wilson1da90412010-02-22 21:39:41 +00001501 VN.erase(LI);
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001502 toErase.push_back(LI);
1503 NumGVNLoad++;
1504 return true;
1505 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001506
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001507 if (!EnablePRE || !EnableLoadPRE)
1508 return false;
1509
1510 // Okay, we have *some* definitions of the value. This means that the value
 1511 // is available in some of our (transitive) predecessors. Let's think about
1512 // doing PRE of this load. This will involve inserting a new load into the
1513 // predecessor when it's not available. We could do this in general, but
1514 // prefer to not increase code size. As such, we only do this when we know
1515 // that we only have to insert *one* load (which means we're basically moving
1516 // the load, not inserting a new one).
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001517
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001518 SmallPtrSet<BasicBlock *, 4> Blockers;
1519 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1520 Blockers.insert(UnavailableBlocks[i]);
1521
 1522 // Let's find the first basic block with more than one predecessor. Walk backwards
1523 // through predecessors if needed.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001524 BasicBlock *LoadBB = LI->getParent();
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001525 BasicBlock *TmpBB = LoadBB;
1526
1527 bool isSinglePred = false;
Dale Johannesen81b64632009-06-17 20:48:23 +00001528 bool allSingleSucc = true;
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001529 while (TmpBB->getSinglePredecessor()) {
1530 isSinglePred = true;
1531 TmpBB = TmpBB->getSinglePredecessor();
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001532 if (TmpBB == LoadBB) // Infinite (unreachable) loop.
1533 return false;
1534 if (Blockers.count(TmpBB))
1535 return false;
Dale Johannesen81b64632009-06-17 20:48:23 +00001536 if (TmpBB->getTerminator()->getNumSuccessors() != 1)
1537 allSingleSucc = false;
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001538 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001539
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001540 assert(TmpBB);
1541 LoadBB = TmpBB;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001542
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001543 // If we have a repl set with LI itself in it, this means we have a loop where
1544 // at least one of the values is LI. Since this means that we won't be able
1545 // to eliminate LI even if we insert uses in the other predecessors, we will
1546 // end up increasing code size. Reject this by scanning for LI.
Bob Wilson0fd41582010-03-02 00:09:29 +00001547 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
1548 if (ValuesPerBlock[i].isSimpleValue() &&
1549 ValuesPerBlock[i].getSimpleValue() == LI) {
1550 // Skip cases where LI is the only definition, even for EnableFullLoadPRE.
1551 if (!EnableFullLoadPRE || e == 1)
Bob Wilsond517b522010-02-01 21:17:14 +00001552 return false;
Bob Wilson0fd41582010-03-02 00:09:29 +00001553 }
Bob Wilsond517b522010-02-01 21:17:14 +00001554 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001555
Chris Lattner93236ba2009-12-06 04:54:31 +00001556 // FIXME: It is extremely unclear what this loop is doing, other than
1557 // artificially restricting loadpre.
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001558 if (isSinglePred) {
1559 bool isHot = false;
Chris Lattner93236ba2009-12-06 04:54:31 +00001560 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
1561 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1562 if (AV.isSimpleValue())
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001563 // "Hot" Instruction is in some loop (because it dominates its dep.
1564 // instruction).
Chris Lattner93236ba2009-12-06 04:54:31 +00001565 if (Instruction *I = dyn_cast<Instruction>(AV.getSimpleValue()))
1566 if (DT->dominates(LI, I)) {
1567 isHot = true;
1568 break;
1569 }
1570 }
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001571
1572 // We are interested only in "hot" instructions. We don't want to do any
1573 // mis-optimizations here.
1574 if (!isHot)
1575 return false;
1576 }
1577
Bob Wilsond517b522010-02-01 21:17:14 +00001578 // Check to see how many predecessors have the loaded value fully
1579 // available.
1580 DenseMap<BasicBlock*, Value*> PredLoads;
Chris Lattnerd2a653a2008-12-05 07:49:08 +00001581 DenseMap<BasicBlock*, char> FullyAvailableBlocks;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001582 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i)
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001583 FullyAvailableBlocks[ValuesPerBlock[i].BB] = true;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001584 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1585 FullyAvailableBlocks[UnavailableBlocks[i]] = false;
1586
Bob Wilsona2fda8b2010-05-04 20:03:21 +00001587 SmallVector<std::pair<TerminatorInst*, unsigned>, 4> NeedToSplit;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001588 for (pred_iterator PI = pred_begin(LoadBB), E = pred_end(LoadBB);
1589 PI != E; ++PI) {
Bob Wilsond517b522010-02-01 21:17:14 +00001590 BasicBlock *Pred = *PI;
1591 if (IsValueFullyAvailableInBlock(Pred, FullyAvailableBlocks)) {
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001592 continue;
Bob Wilsond517b522010-02-01 21:17:14 +00001593 }
1594 PredLoads[Pred] = 0;
Bob Wilson92cdb6e2010-02-16 19:51:59 +00001595
Bob Wilsond517b522010-02-01 21:17:14 +00001596 if (Pred->getTerminator()->getNumSuccessors() != 1) {
Bob Wilson92cdb6e2010-02-16 19:51:59 +00001597 if (isa<IndirectBrInst>(Pred->getTerminator())) {
1598 DEBUG(dbgs() << "COULD NOT PRE LOAD BECAUSE OF INDBR CRITICAL EDGE '"
1599 << Pred->getName() << "': " << *LI << '\n');
1600 return false;
1601 }
Bob Wilsonaff96b22010-02-16 21:06:42 +00001602 unsigned SuccNum = GetSuccessorNumber(Pred, LoadBB);
Bob Wilsona2fda8b2010-05-04 20:03:21 +00001603 NeedToSplit.push_back(std::make_pair(Pred->getTerminator(), SuccNum));
Bob Wilsond517b522010-02-01 21:17:14 +00001604 }
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001605 }
Bob Wilsona2fda8b2010-05-04 20:03:21 +00001606 if (!NeedToSplit.empty()) {
1607 toSplit.append(NeedToSplit.size(), NeedToSplit.front());
Bob Wilson892432b2010-03-01 23:37:32 +00001608 return false;
Bob Wilsona2fda8b2010-05-04 20:03:21 +00001609 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001610
Bob Wilsond517b522010-02-01 21:17:14 +00001611 // Decide whether PRE is profitable for this load.
1612 unsigned NumUnavailablePreds = PredLoads.size();
1613 assert(NumUnavailablePreds != 0 &&
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001614 "Fully available value should be eliminated above!");
Bob Wilsond517b522010-02-01 21:17:14 +00001615 if (!EnableFullLoadPRE) {
1616 // If this load is unavailable in multiple predecessors, reject it.
1617 // FIXME: If we could restructure the CFG, we could make a common pred with
1618 // all the preds that don't have an available LI and insert a new load into
1619 // that one block.
1620 if (NumUnavailablePreds != 1)
1621 return false;
Owen Anderson0cc1a762007-08-07 23:12:31 +00001622 }
Bob Wilsond517b522010-02-01 21:17:14 +00001623
1624 // Check if the load can safely be moved to all the unavailable predecessors.
1625 bool CanDoPRE = true;
Chris Lattner44da5bd2009-11-28 15:39:14 +00001626 SmallVector<Instruction*, 8> NewInsts;
Bob Wilsond517b522010-02-01 21:17:14 +00001627 for (DenseMap<BasicBlock*, Value*>::iterator I = PredLoads.begin(),
1628 E = PredLoads.end(); I != E; ++I) {
1629 BasicBlock *UnavailablePred = I->first;
1630
1631 // Do PHI translation to get its value in the predecessor if necessary. The
1632 // returned pointer (if non-null) is guaranteed to dominate UnavailablePred.
1633
1634 // If all preds have a single successor, then we know it is safe to insert
1635 // the load on the pred (?!?), so we can insert code to materialize the
1636 // pointer if it is not available.
1637 PHITransAddr Address(LI->getOperand(0), TD);
1638 Value *LoadPtr = 0;
1639 if (allSingleSucc) {
1640 LoadPtr = Address.PHITranslateWithInsertion(LoadBB, UnavailablePred,
1641 *DT, NewInsts);
1642 } else {
Daniel Dunbar693ea892010-02-24 08:48:04 +00001643 Address.PHITranslateValue(LoadBB, UnavailablePred, DT);
Bob Wilsond517b522010-02-01 21:17:14 +00001644 LoadPtr = Address.getAddr();
Bob Wilsond517b522010-02-01 21:17:14 +00001645 }
1646
1647 // If we couldn't find or insert a computation of this phi translated value,
1648 // we fail PRE.
1649 if (LoadPtr == 0) {
1650 DEBUG(dbgs() << "COULDN'T INSERT PHI TRANSLATED VALUE OF: "
1651 << *LI->getOperand(0) << "\n");
1652 CanDoPRE = false;
1653 break;
1654 }
1655
1656 // Make sure it is valid to move this load here. We have to watch out for:
1657 // @1 = getelementptr (i8* p, ...
1658 // test p and branch if == 0
1659 // load @1
1660 // It is valid to have the getelementptr before the test, even if p can be 0,
1661 // as getelementptr only does address arithmetic.
1662 // If we are not pushing the value through any multiple-successor blocks
1663 // we do not have this case. Otherwise, check that the load is safe to
1664 // put anywhere; this can be improved, but should be conservatively safe.
1665 if (!allSingleSucc &&
 1666 // FIXME: REEVALUATE THIS.
1667 !isSafeToLoadUnconditionally(LoadPtr,
1668 UnavailablePred->getTerminator(),
1669 LI->getAlignment(), TD)) {
1670 CanDoPRE = false;
1671 break;
1672 }
1673
1674 I->second = LoadPtr;
Chris Lattner972e6d82009-12-09 01:59:31 +00001675 }
1676
Bob Wilsond517b522010-02-01 21:17:14 +00001677 if (!CanDoPRE) {
1678 while (!NewInsts.empty())
1679 NewInsts.pop_back_val()->eraseFromParent();
Dale Johannesen81b64632009-06-17 20:48:23 +00001680 return false;
Chris Lattner32140312009-11-28 16:08:18 +00001681 }
Dale Johannesen81b64632009-06-17 20:48:23 +00001682
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001683 // Okay, we can eliminate this load by inserting a reload in the predecessor
1684 // and using PHI construction to get the value in the other predecessors, do
1685 // it.
David Greene2e6efc42010-01-05 01:27:17 +00001686 DEBUG(dbgs() << "GVN REMOVING PRE LOAD: " << *LI << '\n');
Chris Lattner32140312009-11-28 16:08:18 +00001687 DEBUG(if (!NewInsts.empty())
David Greene2e6efc42010-01-05 01:27:17 +00001688 dbgs() << "INSERTED " << NewInsts.size() << " INSTS: "
Chris Lattner32140312009-11-28 16:08:18 +00001689 << *NewInsts.back() << '\n');
1690
Bob Wilsond517b522010-02-01 21:17:14 +00001691 // Assign value numbers to the new instructions.
1692 for (unsigned i = 0, e = NewInsts.size(); i != e; ++i) {
1693 // FIXME: We really _ought_ to insert these value numbers into their
1694 // parent's availability map. However, in doing so, we risk getting into
1695 // ordering issues. If a block hasn't been processed yet, we would be
1696 // marking a value as AVAIL-IN, which isn't what we intend.
1697 VN.lookup_or_add(NewInsts[i]);
1698 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001699
Bob Wilsond517b522010-02-01 21:17:14 +00001700 for (DenseMap<BasicBlock*, Value*>::iterator I = PredLoads.begin(),
1701 E = PredLoads.end(); I != E; ++I) {
1702 BasicBlock *UnavailablePred = I->first;
1703 Value *LoadPtr = I->second;
1704
1705 Value *NewLoad = new LoadInst(LoadPtr, LI->getName()+".pre", false,
1706 LI->getAlignment(),
1707 UnavailablePred->getTerminator());
1708
1709 // Add the newly created load.
1710 ValuesPerBlock.push_back(AvailableValueInBlock::get(UnavailablePred,
1711 NewLoad));
Bob Wilson923261b2010-02-23 05:55:00 +00001712 MD->invalidateCachedPointerInfo(LoadPtr);
1713 DEBUG(dbgs() << "GVN INSERTED " << *NewLoad << '\n');
Bob Wilsond517b522010-02-01 21:17:14 +00001714 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001715
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001716 // Perform PHI construction.
Chris Lattnerbf200182009-12-21 23:15:48 +00001717 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD, *DT,
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001718 VN.getAliasAnalysis());
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001719 LI->replaceAllUsesWith(V);
1720 if (isa<PHINode>(V))
1721 V->takeName(LI);
Duncan Sands19d0b472010-02-16 11:11:14 +00001722 if (V->getType()->isPointerTy())
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001723 MD->invalidateCachedPointerInfo(V);
Bob Wilson1da90412010-02-22 21:39:41 +00001724 VN.erase(LI);
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001725 toErase.push_back(LI);
1726 NumPRELoad++;
Owen Anderson5e5599b2007-07-25 19:57:03 +00001727 return true;
1728}
1729
Owen Anderson221a4362007-08-16 22:02:55 +00001730/// processLoad - Attempt to eliminate a load, first by eliminating it
1731/// locally, and then attempting non-local elimination if that fails.
Chris Lattner0e3d6332008-12-05 21:04:20 +00001732bool GVN::processLoad(LoadInst *L, SmallVectorImpl<Instruction*> &toErase) {
Dan Gohman81132462009-11-14 02:27:51 +00001733 if (!MD)
1734 return false;
1735
Chris Lattner0e3d6332008-12-05 21:04:20 +00001736 if (L->isVolatile())
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001737 return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001738
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001739 // ... to a pointer that has been loaded from before...
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001740 MemDepResult Dep = MD->getDependency(L);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001741
Chris Lattner0e3d6332008-12-05 21:04:20 +00001742 // If the value isn't available, don't do anything!
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001743 if (Dep.isClobber()) {
Chris Lattner0a9616d2009-09-21 05:57:11 +00001744 // Check to see if we have something like this:
Chris Lattner1dd48c32009-09-20 19:03:47 +00001745 // store i32 123, i32* %P
1746 // %A = bitcast i32* %P to i8*
1747 // %B = gep i8* %A, i32 1
1748 // %C = load i8* %B
1749 //
1750 // We could do that by recognizing if the clobber instructions are obviously
1751 // a common base + constant offset, and if the previous store (or memset)
1752 // completely covers this load. This sort of thing can happen in bitfield
1753 // access code.
Chris Lattner42376062009-12-06 01:57:02 +00001754 Value *AvailVal = 0;
Chris Lattner0a9616d2009-09-21 05:57:11 +00001755 if (StoreInst *DepSI = dyn_cast<StoreInst>(Dep.getInst()))
Chris Lattner9d7fb292009-09-21 06:22:46 +00001756 if (const TargetData *TD = getAnalysisIfAvailable<TargetData>()) {
Chris Lattner07df9ef2009-12-09 07:37:07 +00001757 int Offset = AnalyzeLoadFromClobberingStore(L->getType(),
1758 L->getPointerOperand(),
1759 DepSI, *TD);
Chris Lattner42376062009-12-06 01:57:02 +00001760 if (Offset != -1)
1761 AvailVal = GetStoreValueForLoad(DepSI->getOperand(0), Offset,
1762 L->getType(), L, *TD);
Chris Lattner9d7fb292009-09-21 06:22:46 +00001763 }
Chris Lattner0a9616d2009-09-21 05:57:11 +00001764
Chris Lattner42376062009-12-06 01:57:02 +00001765 // If the clobbering value is a memset/memcpy/memmove, see if we can forward
1766 // a value on from it.
1767 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(Dep.getInst())) {
1768 if (const TargetData *TD = getAnalysisIfAvailable<TargetData>()) {
Chris Lattner07df9ef2009-12-09 07:37:07 +00001769 int Offset = AnalyzeLoadFromClobberingMemInst(L->getType(),
1770 L->getPointerOperand(),
1771 DepMI, *TD);
Chris Lattner42376062009-12-06 01:57:02 +00001772 if (Offset != -1)
1773 AvailVal = GetMemInstValueForLoad(DepMI, Offset, L->getType(), L,*TD);
1774 }
1775 }
1776
1777 if (AvailVal) {
David Greene2e6efc42010-01-05 01:27:17 +00001778 DEBUG(dbgs() << "GVN COERCED INST:\n" << *Dep.getInst() << '\n'
Chris Lattner42376062009-12-06 01:57:02 +00001779 << *AvailVal << '\n' << *L << "\n\n\n");
1780
1781 // Replace the load!
1782 L->replaceAllUsesWith(AvailVal);
Duncan Sands19d0b472010-02-16 11:11:14 +00001783 if (AvailVal->getType()->isPointerTy())
Chris Lattner42376062009-12-06 01:57:02 +00001784 MD->invalidateCachedPointerInfo(AvailVal);
Bob Wilson1da90412010-02-22 21:39:41 +00001785 VN.erase(L);
Chris Lattner42376062009-12-06 01:57:02 +00001786 toErase.push_back(L);
1787 NumGVNLoad++;
1788 return true;
1789 }
1790
Torok Edwin72070282009-05-29 09:46:03 +00001791 DEBUG(
1792 // fast print dep, using operator<< on instruction would be too slow
David Greene2e6efc42010-01-05 01:27:17 +00001793 dbgs() << "GVN: load ";
1794 WriteAsOperand(dbgs(), L);
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001795 Instruction *I = Dep.getInst();
David Greene2e6efc42010-01-05 01:27:17 +00001796 dbgs() << " is clobbered by " << *I << '\n';
Torok Edwin72070282009-05-29 09:46:03 +00001797 );
Chris Lattner0e3d6332008-12-05 21:04:20 +00001798 return false;
Torok Edwin72070282009-05-29 09:46:03 +00001799 }
Chris Lattner0e3d6332008-12-05 21:04:20 +00001800
1801 // If it is defined in another block, try harder.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001802 if (Dep.isNonLocal())
Chris Lattner0e3d6332008-12-05 21:04:20 +00001803 return processNonLocalLoad(L, toErase);
Eli Friedman716c10c2008-02-12 12:08:14 +00001804
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001805 Instruction *DepInst = Dep.getInst();
Chris Lattner0e3d6332008-12-05 21:04:20 +00001806 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInst)) {
Chris Lattner1dd48c32009-09-20 19:03:47 +00001807 Value *StoredVal = DepSI->getOperand(0);
1808
1809 // The store and load are to a must-aliased pointer, but they may not
1810 // actually have the same type. See if we know how to reuse the stored
1811 // value (depending on its type).
1812 const TargetData *TD = 0;
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001813 if (StoredVal->getType() != L->getType()) {
1814 if ((TD = getAnalysisIfAvailable<TargetData>())) {
1815 StoredVal = CoerceAvailableValueToLoadType(StoredVal, L->getType(),
1816 L, *TD);
1817 if (StoredVal == 0)
1818 return false;
1819
David Greene2e6efc42010-01-05 01:27:17 +00001820 DEBUG(dbgs() << "GVN COERCED STORE:\n" << *DepSI << '\n' << *StoredVal
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001821 << '\n' << *L << "\n\n\n");
1822 }
1823 else
Chris Lattner1dd48c32009-09-20 19:03:47 +00001824 return false;
Chris Lattner1dd48c32009-09-20 19:03:47 +00001825 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001826
Chris Lattner0e3d6332008-12-05 21:04:20 +00001827 // Remove it!
Chris Lattner1dd48c32009-09-20 19:03:47 +00001828 L->replaceAllUsesWith(StoredVal);
Duncan Sands19d0b472010-02-16 11:11:14 +00001829 if (StoredVal->getType()->isPointerTy())
Chris Lattner1dd48c32009-09-20 19:03:47 +00001830 MD->invalidateCachedPointerInfo(StoredVal);
Bob Wilson1da90412010-02-22 21:39:41 +00001831 VN.erase(L);
Chris Lattner0e3d6332008-12-05 21:04:20 +00001832 toErase.push_back(L);
1833 NumGVNLoad++;
1834 return true;
1835 }
1836
1837 if (LoadInst *DepLI = dyn_cast<LoadInst>(DepInst)) {
Chris Lattner1dd48c32009-09-20 19:03:47 +00001838 Value *AvailableVal = DepLI;
1839
1840 // The loads are of a must-aliased pointer, but they may not actually have
1841 // the same type. See if we know how to reuse the previously loaded value
1842 // (depending on its type).
1843 const TargetData *TD = 0;
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001844 if (DepLI->getType() != L->getType()) {
1845 if ((TD = getAnalysisIfAvailable<TargetData>())) {
1846 AvailableVal = CoerceAvailableValueToLoadType(DepLI, L->getType(), L,*TD);
1847 if (AvailableVal == 0)
1848 return false;
Chris Lattner1dd48c32009-09-20 19:03:47 +00001849
David Greene2e6efc42010-01-05 01:27:17 +00001850 DEBUG(dbgs() << "GVN COERCED LOAD:\n" << *DepLI << "\n" << *AvailableVal
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001851 << "\n" << *L << "\n\n\n");
1852 }
1853 else
1854 return false;
Chris Lattner1dd48c32009-09-20 19:03:47 +00001855 }
1856
Chris Lattner0e3d6332008-12-05 21:04:20 +00001857 // Remove it!
Chris Lattner1dd48c32009-09-20 19:03:47 +00001858 L->replaceAllUsesWith(AvailableVal);
Duncan Sands19d0b472010-02-16 11:11:14 +00001859 if (DepLI->getType()->isPointerTy())
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00001860 MD->invalidateCachedPointerInfo(DepLI);
Bob Wilson1da90412010-02-22 21:39:41 +00001861 VN.erase(L);
Chris Lattner0e3d6332008-12-05 21:04:20 +00001862 toErase.push_back(L);
1863 NumGVNLoad++;
1864 return true;
1865 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001866
Chris Lattner3ff6d012008-11-30 01:39:32 +00001867 // If this load really doesn't depend on anything, then we must be loading an
1868 // undef value. This can happen when loading for a fresh allocation with no
1869 // intervening stores, for example.
Victor Hernandez8acf2952009-10-23 21:09:37 +00001870 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst)) {
Owen Andersonb292b8c2009-07-30 23:03:37 +00001871 L->replaceAllUsesWith(UndefValue::get(L->getType()));
Bob Wilson1da90412010-02-22 21:39:41 +00001872 VN.erase(L);
Chris Lattner3ff6d012008-11-30 01:39:32 +00001873 toErase.push_back(L);
Chris Lattner3ff6d012008-11-30 01:39:32 +00001874 NumGVNLoad++;
Chris Lattner0e3d6332008-12-05 21:04:20 +00001875 return true;
Eli Friedman716c10c2008-02-12 12:08:14 +00001876 }
Owen Anderson2b2bd282009-10-28 07:05:35 +00001877
Owen Andersonb9878ee2009-12-02 07:35:19 +00001878 // If this load occurs right after a lifetime begin,
Owen Anderson2b2bd282009-10-28 07:05:35 +00001879 // then the loaded value is undefined.
1880 if (IntrinsicInst* II = dyn_cast<IntrinsicInst>(DepInst)) {
Owen Andersonb9878ee2009-12-02 07:35:19 +00001881 if (II->getIntrinsicID() == Intrinsic::lifetime_start) {
Owen Anderson2b2bd282009-10-28 07:05:35 +00001882 L->replaceAllUsesWith(UndefValue::get(L->getType()));
Bob Wilson1da90412010-02-22 21:39:41 +00001883 VN.erase(L);
Owen Anderson2b2bd282009-10-28 07:05:35 +00001884 toErase.push_back(L);
1885 NumGVNLoad++;
1886 return true;
1887 }
1888 }
Eli Friedman716c10c2008-02-12 12:08:14 +00001889
Chris Lattner0e3d6332008-12-05 21:04:20 +00001890 return false;
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001891}
1892
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001893Value *GVN::lookupNumber(BasicBlock *BB, uint32_t num) {
Owen Anderson54e02192008-06-23 17:49:45 +00001894 DenseMap<BasicBlock*, ValueNumberScope*>::iterator I = localAvail.find(BB);
1895 if (I == localAvail.end())
1896 return 0;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001897
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001898 ValueNumberScope *Locals = I->second;
1899 while (Locals) {
1900 DenseMap<uint32_t, Value*>::iterator I = Locals->table.find(num);
1901 if (I != Locals->table.end())
Owen Anderson1b3ea962008-06-20 01:15:47 +00001902 return I->second;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001903 Locals = Locals->parent;
Owen Anderson1b3ea962008-06-20 01:15:47 +00001904 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001905
Owen Anderson1b3ea962008-06-20 01:15:47 +00001906 return 0;
1907}
1908
Owen Andersonbfe133e2008-12-15 02:03:00 +00001909
Owen Anderson398602a2007-08-14 18:16:29 +00001910/// processInstruction - When calculating availability, handle an instruction
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001911/// by inserting it into the appropriate sets
Owen Andersonaccdca12008-06-12 19:25:32 +00001912bool GVN::processInstruction(Instruction *I,
Chris Lattner804209d2008-03-21 22:01:16 +00001913 SmallVectorImpl<Instruction*> &toErase) {
Devang Patel03936a12010-02-11 00:20:49 +00001914 // Ignore dbg info intrinsics.
1915 if (isa<DbgInfoIntrinsic>(I))
1916 return false;
1917
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001918 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
1919 bool Changed = processLoad(LI, toErase);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001920
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001921 if (!Changed) {
1922 unsigned Num = VN.lookup_or_add(LI);
1923 localAvail[I->getParent()]->table.insert(std::make_pair(Num, LI));
Owen Anderson6a903bc2008-06-18 21:41:49 +00001924 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001925
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001926 return Changed;
Owen Anderson6a903bc2008-06-18 21:41:49 +00001927 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001928
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001929 uint32_t NextNum = VN.getNextUnusedValueNumber();
1930 unsigned Num = VN.lookup_or_add(I);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001931
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001932 if (BranchInst *BI = dyn_cast<BranchInst>(I)) {
1933 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001934
Owen Anderson98f912b2009-04-01 23:53:49 +00001935 if (!BI->isConditional() || isa<Constant>(BI->getCondition()))
1936 return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001937
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001938 Value *BranchCond = BI->getCondition();
1939 uint32_t CondVN = VN.lookup_or_add(BranchCond);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001940
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001941 BasicBlock *TrueSucc = BI->getSuccessor(0);
1942 BasicBlock *FalseSucc = BI->getSuccessor(1);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001943
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001944 if (TrueSucc->getSinglePredecessor())
1945 localAvail[TrueSucc]->table[CondVN] =
1946 ConstantInt::getTrue(TrueSucc->getContext());
1947 if (FalseSucc->getSinglePredecessor())
1948 localAvail[FalseSucc]->table[CondVN] =
1949 ConstantInt::getFalse(TrueSucc->getContext());
Owen Anderson98f912b2009-04-01 23:53:49 +00001950
1951 return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001952
Owen Anderson0c1e6342008-04-07 09:59:07 +00001953 // Allocations are always uniquely numbered, so we can save time and memory
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001954 // by fast failing them.
Victor Hernandez8acf2952009-10-23 21:09:37 +00001955 } else if (isa<AllocaInst>(I) || isa<TerminatorInst>(I)) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001956 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Anderson0c1e6342008-04-07 09:59:07 +00001957 return false;
Owen Anderson6a903bc2008-06-18 21:41:49 +00001958 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001959
Owen Anderson221a4362007-08-16 22:02:55 +00001960 // Collapse PHI nodes
Owen Andersonbc271a02007-08-14 18:33:27 +00001961 if (PHINode* p = dyn_cast<PHINode>(I)) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001962 Value *constVal = CollapsePhi(p);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001963
Owen Andersonbc271a02007-08-14 18:33:27 +00001964 if (constVal) {
Owen Andersonf5023a72007-08-16 22:51:56 +00001965 p->replaceAllUsesWith(constVal);
Duncan Sands19d0b472010-02-16 11:11:14 +00001966 if (MD && constVal->getType()->isPointerTy())
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00001967 MD->invalidateCachedPointerInfo(constVal);
Owen Anderson164274e2008-12-23 00:49:51 +00001968 VN.erase(p);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001969
Owen Andersonf5023a72007-08-16 22:51:56 +00001970 toErase.push_back(p);
Owen Anderson6a903bc2008-06-18 21:41:49 +00001971 } else {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001972 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Andersonbc271a02007-08-14 18:33:27 +00001973 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001974
Owen Anderson3ea90a72008-07-03 17:44:33 +00001975 // If the number we were assigned was a brand new VN, then we don't
1976 // need to do a lookup to see if the number already exists
1977 // somewhere in the domtree: it can't!
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001978 } else if (Num == NextNum) {
1979 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001980
Owen Andersonbfe133e2008-12-15 02:03:00 +00001981 // Perform fast-path value-number based elimination of values inherited from
1982 // dominators.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001983 } else if (Value *repl = lookupNumber(I->getParent(), Num)) {
Owen Anderson086b2c42007-12-08 01:37:09 +00001984 // Remove it!
Owen Anderson10ffa862007-07-31 23:27:13 +00001985 VN.erase(I);
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001986 I->replaceAllUsesWith(repl);
Duncan Sands19d0b472010-02-16 11:11:14 +00001987 if (MD && repl->getType()->isPointerTy())
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00001988 MD->invalidateCachedPointerInfo(repl);
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001989 toErase.push_back(I);
1990 return true;
Owen Andersonbfe133e2008-12-15 02:03:00 +00001991
Owen Anderson3ea90a72008-07-03 17:44:33 +00001992 } else {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001993 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001994 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001995
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001996 return false;
1997}
1998
Bill Wendling456e8852008-12-22 22:32:22 +00001999/// runOnFunction - This is the main transformation entry point for a function.
Owen Anderson676070d2007-08-14 18:04:11 +00002000bool GVN::runOnFunction(Function& F) {
Dan Gohman81132462009-11-14 02:27:51 +00002001 if (!NoLoads)
2002 MD = &getAnalysis<MemoryDependenceAnalysis>();
Chris Lattner8541ede2008-12-01 00:40:32 +00002003 DT = &getAnalysis<DominatorTree>();
Owen Andersonf7928602008-05-12 20:15:55 +00002004 VN.setAliasAnalysis(&getAnalysis<AliasAnalysis>());
Chris Lattner8541ede2008-12-01 00:40:32 +00002005 VN.setMemDep(MD);
2006 VN.setDomTree(DT);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002007
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002008 bool Changed = false;
2009 bool ShouldContinue = true;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002010
Owen Andersonac310962008-07-16 17:52:31 +00002011 // Merge unconditional branches, allowing PRE to catch more
2012 // optimization opportunities.
2013 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002014 BasicBlock *BB = FI;
Owen Andersonac310962008-07-16 17:52:31 +00002015 ++FI;
Owen Andersonc0623812008-07-17 00:01:40 +00002016 bool removedBlock = MergeBlockIntoPredecessor(BB, this);
2017 if (removedBlock) NumGVNBlocks++;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002018
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002019 Changed |= removedBlock;
Owen Andersonac310962008-07-16 17:52:31 +00002020 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002021
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002022 unsigned Iteration = 0;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002023
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002024 while (ShouldContinue) {
David Greene2e6efc42010-01-05 01:27:17 +00002025 DEBUG(dbgs() << "GVN iteration: " << Iteration << "\n");
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002026 ShouldContinue = iterateOnFunction(F);
Bob Wilson92cdb6e2010-02-16 19:51:59 +00002027 if (splitCriticalEdges())
2028 ShouldContinue = true;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002029 Changed |= ShouldContinue;
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002030 ++Iteration;
Owen Anderson676070d2007-08-14 18:04:11 +00002031 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002032
Owen Anderson04a6e0b2008-07-18 18:03:38 +00002033 if (EnablePRE) {
Owen Anderson2fbfb702008-09-03 23:06:07 +00002034 bool PREChanged = true;
2035 while (PREChanged) {
2036 PREChanged = performPRE(F);
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002037 Changed |= PREChanged;
Owen Anderson2fbfb702008-09-03 23:06:07 +00002038 }
Owen Anderson04a6e0b2008-07-18 18:03:38 +00002039 }
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002040 // FIXME: Should perform GVN again after PRE does something. PRE can move
2041 // computations into blocks where they become fully redundant. Note that
2042 // we can't do this until PRE's critical edge splitting updates memdep.
2043 // Actually, when this happens, we should just fully integrate PRE into GVN.
Nuno Lopese3127f32008-10-10 16:25:50 +00002044
2045 cleanupGlobalSets();
2046
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002047 return Changed;
Owen Anderson676070d2007-08-14 18:04:11 +00002048}
2049
2050
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002051bool GVN::processBlock(BasicBlock *BB) {
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002052 // FIXME: Kill off toErase by doing erasing eagerly in a helper function (and
2053 // incrementing BI before processing an instruction).
Owen Andersonaccdca12008-06-12 19:25:32 +00002054 SmallVector<Instruction*, 8> toErase;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002055 bool ChangedFunction = false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002056
Owen Andersonaccdca12008-06-12 19:25:32 +00002057 for (BasicBlock::iterator BI = BB->begin(), BE = BB->end();
2058 BI != BE;) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002059 ChangedFunction |= processInstruction(BI, toErase);
Owen Andersonaccdca12008-06-12 19:25:32 +00002060 if (toErase.empty()) {
2061 ++BI;
2062 continue;
2063 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002064
Owen Andersonaccdca12008-06-12 19:25:32 +00002065 // If we need some instructions deleted, do it now.
2066 NumGVNInstr += toErase.size();
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002067
Owen Andersonaccdca12008-06-12 19:25:32 +00002068 // Avoid iterator invalidation.
2069 bool AtStart = BI == BB->begin();
2070 if (!AtStart)
2071 --BI;
2072
2073 for (SmallVector<Instruction*, 4>::iterator I = toErase.begin(),
Chris Lattner8541ede2008-12-01 00:40:32 +00002074 E = toErase.end(); I != E; ++I) {
David Greene2e6efc42010-01-05 01:27:17 +00002075 DEBUG(dbgs() << "GVN removed: " << **I << '\n');
Dan Gohman81132462009-11-14 02:27:51 +00002076 if (MD) MD->removeInstruction(*I);
Owen Andersonaccdca12008-06-12 19:25:32 +00002077 (*I)->eraseFromParent();
Bill Wendlingebb6a542008-12-22 21:57:30 +00002078 DEBUG(verifyRemoved(*I));
Chris Lattner8541ede2008-12-01 00:40:32 +00002079 }
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002080 toErase.clear();
Owen Andersonaccdca12008-06-12 19:25:32 +00002081
2082 if (AtStart)
2083 BI = BB->begin();
2084 else
2085 ++BI;
Owen Andersonaccdca12008-06-12 19:25:32 +00002086 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002087
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002088 return ChangedFunction;
Owen Andersonaccdca12008-06-12 19:25:32 +00002089}
2090
Owen Anderson6a903bc2008-06-18 21:41:49 +00002091/// performPRE - Perform a purely local form of PRE that looks for diamond
2092/// control flow patterns and attempts to perform simple PRE at the join point.
Chris Lattnera546dcf2009-10-31 22:11:15 +00002093bool GVN::performPRE(Function &F) {
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002094 bool Changed = false;
Chris Lattnerf00aae42008-12-01 07:29:03 +00002095 DenseMap<BasicBlock*, Value*> predMap;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002096 for (df_iterator<BasicBlock*> DI = df_begin(&F.getEntryBlock()),
2097 DE = df_end(&F.getEntryBlock()); DI != DE; ++DI) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002098 BasicBlock *CurrentBlock = *DI;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002099
Owen Anderson6a903bc2008-06-18 21:41:49 +00002100 // Nothing to PRE in the entry block.
2101 if (CurrentBlock == &F.getEntryBlock()) continue;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002102
Owen Anderson6a903bc2008-06-18 21:41:49 +00002103 for (BasicBlock::iterator BI = CurrentBlock->begin(),
2104 BE = CurrentBlock->end(); BI != BE; ) {
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002105 Instruction *CurInst = BI++;
Duncan Sands1efabaa2009-05-06 06:49:50 +00002106
Victor Hernandez8acf2952009-10-23 21:09:37 +00002107 if (isa<AllocaInst>(CurInst) ||
Victor Hernandez5d034492009-09-18 22:35:49 +00002108 isa<TerminatorInst>(CurInst) || isa<PHINode>(CurInst) ||
Devang Patel92f86192009-10-14 17:29:00 +00002109 CurInst->getType()->isVoidTy() ||
Duncan Sands1efabaa2009-05-06 06:49:50 +00002110 CurInst->mayReadFromMemory() || CurInst->mayHaveSideEffects() ||
John Criswell073e4d12009-03-10 15:04:53 +00002111 isa<DbgInfoIntrinsic>(CurInst))
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002112 continue;
Duncan Sands1efabaa2009-05-06 06:49:50 +00002113
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002114 uint32_t ValNo = VN.lookup(CurInst);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002115
Owen Anderson6a903bc2008-06-18 21:41:49 +00002116      // Look at the predecessors for PRE opportunities. We're
2117 // only trying to solve the basic diamond case, where
2118 // a value is computed in the successor and one predecessor,
2119 // but not the other. We also explicitly disallow cases
2120 // where the successor is its own predecessor, because they're
2121 // more complicated to get right.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002122 unsigned NumWith = 0;
2123 unsigned NumWithout = 0;
2124 BasicBlock *PREPred = 0;
Chris Lattnerf00aae42008-12-01 07:29:03 +00002125 predMap.clear();
2126
Owen Anderson6a903bc2008-06-18 21:41:49 +00002127 for (pred_iterator PI = pred_begin(CurrentBlock),
2128 PE = pred_end(CurrentBlock); PI != PE; ++PI) {
2129 // We're not interested in PRE where the block is its
Bob Wilson76e8c592010-02-03 00:33:21 +00002130 // own predecessor, or in blocks with predecessors
Owen Anderson1b3ea962008-06-20 01:15:47 +00002131 // that are not reachable.
2132 if (*PI == CurrentBlock) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002133 NumWithout = 2;
Owen Anderson1b3ea962008-06-20 01:15:47 +00002134 break;
2135 } else if (!localAvail.count(*PI)) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002136 NumWithout = 2;
Owen Anderson1b3ea962008-06-20 01:15:47 +00002137 break;
2138 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002139
2140 DenseMap<uint32_t, Value*>::iterator predV =
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002141 localAvail[*PI]->table.find(ValNo);
Owen Anderson1b3ea962008-06-20 01:15:47 +00002142 if (predV == localAvail[*PI]->table.end()) {
Owen Anderson6a903bc2008-06-18 21:41:49 +00002143 PREPred = *PI;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002144 NumWithout++;
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002145 } else if (predV->second == CurInst) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002146 NumWithout = 2;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002147 } else {
Owen Anderson1b3ea962008-06-20 01:15:47 +00002148 predMap[*PI] = predV->second;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002149 NumWith++;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002150 }
2151 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002152
Owen Anderson6a903bc2008-06-18 21:41:49 +00002153 // Don't do PRE when it might increase code size, i.e. when
2154 // we would need to insert instructions in more than one pred.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002155 if (NumWithout != 1 || NumWith == 0)
Owen Anderson6a903bc2008-06-18 21:41:49 +00002156 continue;
Chris Lattnera546dcf2009-10-31 22:11:15 +00002157
 2158      // Don't do PRE across an indirect branch.
2159 if (isa<IndirectBrInst>(PREPred->getTerminator()))
2160 continue;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002161
Owen Andersonfdf9f162008-06-19 19:54:19 +00002162 // We can't do PRE safely on a critical edge, so instead we schedule
2163 // the edge to be split and perform the PRE the next time we iterate
2164 // on the function.
Bob Wilsonaff96b22010-02-16 21:06:42 +00002165 unsigned SuccNum = GetSuccessorNumber(PREPred, CurrentBlock);
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002166 if (isCriticalEdge(PREPred->getTerminator(), SuccNum)) {
2167 toSplit.push_back(std::make_pair(PREPred->getTerminator(), SuccNum));
Owen Andersonfdf9f162008-06-19 19:54:19 +00002168 continue;
2169 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002170
Bob Wilson76e8c592010-02-03 00:33:21 +00002171 // Instantiate the expression in the predecessor that lacked it.
Owen Anderson6a903bc2008-06-18 21:41:49 +00002172 // Because we are going top-down through the block, all value numbers
2173 // will be available in the predecessor by the time we need them. Any
Bob Wilson76e8c592010-02-03 00:33:21 +00002174 // that weren't originally present will have been instantiated earlier
Owen Anderson6a903bc2008-06-18 21:41:49 +00002175 // in this loop.
Nick Lewycky42fb7452009-09-27 07:38:41 +00002176 Instruction *PREInstr = CurInst->clone();
Owen Anderson6a903bc2008-06-18 21:41:49 +00002177 bool success = true;
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002178 for (unsigned i = 0, e = CurInst->getNumOperands(); i != e; ++i) {
2179 Value *Op = PREInstr->getOperand(i);
2180 if (isa<Argument>(Op) || isa<Constant>(Op) || isa<GlobalValue>(Op))
2181 continue;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002182
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002183 if (Value *V = lookupNumber(PREPred, VN.lookup(Op))) {
2184 PREInstr->setOperand(i, V);
2185 } else {
2186 success = false;
2187 break;
Owen Anderson8e462e92008-07-11 20:05:13 +00002188 }
Owen Anderson6a903bc2008-06-18 21:41:49 +00002189 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002190
Owen Anderson6a903bc2008-06-18 21:41:49 +00002191      // Bail out if we encounter an operand that is not available in
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002192      // the PRE predecessor. This is typically because of loads, which
Owen Anderson6a903bc2008-06-18 21:41:49 +00002193      // are not value numbered precisely.
2194 if (!success) {
2195 delete PREInstr;
Bill Wendling3c793442008-12-22 22:14:07 +00002196 DEBUG(verifyRemoved(PREInstr));
Owen Anderson6a903bc2008-06-18 21:41:49 +00002197 continue;
2198 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002199
Owen Anderson6a903bc2008-06-18 21:41:49 +00002200 PREInstr->insertBefore(PREPred->getTerminator());
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002201 PREInstr->setName(CurInst->getName() + ".pre");
Owen Anderson1b3ea962008-06-20 01:15:47 +00002202 predMap[PREPred] = PREInstr;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002203 VN.add(PREInstr, ValNo);
Owen Anderson6a903bc2008-06-18 21:41:49 +00002204 NumGVNPRE++;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002205
Owen Anderson6a903bc2008-06-18 21:41:49 +00002206 // Update the availability map to include the new instruction.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002207 localAvail[PREPred]->table.insert(std::make_pair(ValNo, PREInstr));
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002208
Owen Anderson6a903bc2008-06-18 21:41:49 +00002209 // Create a PHI to make the value available in this block.
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002210 PHINode* Phi = PHINode::Create(CurInst->getType(),
2211 CurInst->getName() + ".pre-phi",
Owen Anderson6a903bc2008-06-18 21:41:49 +00002212 CurrentBlock->begin());
2213 for (pred_iterator PI = pred_begin(CurrentBlock),
2214 PE = pred_end(CurrentBlock); PI != PE; ++PI)
Owen Anderson1b3ea962008-06-20 01:15:47 +00002215 Phi->addIncoming(predMap[*PI], *PI);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002216
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002217 VN.add(Phi, ValNo);
2218 localAvail[CurrentBlock]->table[ValNo] = Phi;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002219
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002220 CurInst->replaceAllUsesWith(Phi);
Duncan Sands19d0b472010-02-16 11:11:14 +00002221 if (MD && Phi->getType()->isPointerTy())
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00002222 MD->invalidateCachedPointerInfo(Phi);
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002223 VN.erase(CurInst);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002224
David Greene2e6efc42010-01-05 01:27:17 +00002225 DEBUG(dbgs() << "GVN PRE removed: " << *CurInst << '\n');
Dan Gohman81132462009-11-14 02:27:51 +00002226 if (MD) MD->removeInstruction(CurInst);
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002227 CurInst->eraseFromParent();
Bill Wendlingebb6a542008-12-22 21:57:30 +00002228 DEBUG(verifyRemoved(CurInst));
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002229 Changed = true;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002230 }
2231 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002232
Bob Wilson92cdb6e2010-02-16 19:51:59 +00002233 if (splitCriticalEdges())
2234 Changed = true;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002235
Bob Wilson92cdb6e2010-02-16 19:51:59 +00002236 return Changed;
2237}
2238
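// The simplest diamond the routine above targets, written out as C code: the
// expression a + b is computed on the then-path and again at the join, but
// not on the else-path.  PRE clones it into the predecessor that lacked it
// and the join merely merges the two copies (the job of the phi created
// above).  Illustrative sketch only; 'before' and 'after' are made-up
// functions, not part of this pass:
#if 0
static int before(bool c, int a, int b) {
  int x;
  if (c)
    x = a + b;           // a + b is available on this path
  else
    x = 42;              // a + b is missing on this path
  return x + (a + b);    // partially redundant: recomputed even when c is true
}

static int after(bool c, int a, int b) {
  int x, t;
  if (c) {
    x = a + b;
    t = x;               // reuse the computation this path already did
  } else {
    x = 42;
    t = a + b;           // the ".pre" copy inserted into the lacking pred
  }
  return x + t;          // 't' stands in for the ".pre-phi" merge
}
#endif
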
2239/// splitCriticalEdges - Split critical edges found during the previous
 2240/// iteration; doing so may enable further optimization.
2241bool GVN::splitCriticalEdges() {
2242 if (toSplit.empty())
2243 return false;
2244 do {
2245 std::pair<TerminatorInst*, unsigned> Edge = toSplit.pop_back_val();
2246 SplitCriticalEdge(Edge.first, Edge.second, this);
2247 } while (!toSplit.empty());
Evan Cheng7263cf8432010-03-01 22:23:12 +00002248 if (MD) MD->invalidateCachedPredecessors();
Bob Wilson92cdb6e2010-02-16 19:51:59 +00002249 return true;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002250}
2251
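// Why performPRE defers critical edges to this helper: when the predecessor
// that lacks the value also branches somewhere else, placing the computation
// at the end of that predecessor would execute it on a path that never needs
// it.  Splitting the edge first creates a block that feeds only the join, so
// the next iteration has a safe insertion point.  Illustrative sketch only;
// criticalEdgeShape is a made-up function, not part of this pass:
#if 0
static int criticalEdgeShape(bool c, bool d, int a, int b) {
  int r = 0;
  if (c) {
    if (d)
      return 0;          // the block testing 'd' has a second successor that
                         // never needs a + b, so its edge to the join below
                         // is a critical edge
  } else {
    r = a + b;           // a + b is available on this path
  }
  return r + (a + b);    // join block: a + b is only partially redundant
}
#endif
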
Bill Wendling456e8852008-12-22 22:32:22 +00002252/// iterateOnFunction - Executes one iteration of GVN.
Owen Anderson676070d2007-08-14 18:04:11 +00002253bool GVN::iterateOnFunction(Function &F) {
Nuno Lopese3127f32008-10-10 16:25:50 +00002254 cleanupGlobalSets();
Chris Lattnerbeb216d2008-03-21 21:33:23 +00002255
Owen Anderson98f912b2009-04-01 23:53:49 +00002256 for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
2257 DE = df_end(DT->getRootNode()); DI != DE; ++DI) {
2258 if (DI->getIDom())
2259 localAvail[DI->getBlock()] =
2260 new ValueNumberScope(localAvail[DI->getIDom()->getBlock()]);
2261 else
2262 localAvail[DI->getBlock()] = new ValueNumberScope(0);
2263 }
2264
Owen Andersonab6ec2e2007-07-24 17:55:58 +00002265 // Top-down walk of the dominator tree
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002266 bool Changed = false;
Owen Anderson03aacba2008-12-15 03:52:17 +00002267#if 0
2268 // Needed for value numbering with phi construction to work.
Owen Andersonbfe133e2008-12-15 02:03:00 +00002269 ReversePostOrderTraversal<Function*> RPOT(&F);
2270 for (ReversePostOrderTraversal<Function*>::rpo_iterator RI = RPOT.begin(),
2271 RE = RPOT.end(); RI != RE; ++RI)
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002272 Changed |= processBlock(*RI);
Owen Anderson03aacba2008-12-15 03:52:17 +00002273#else
2274 for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
2275 DE = df_end(DT->getRootNode()); DI != DE; ++DI)
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002276 Changed |= processBlock(DI->getBlock());
Owen Anderson03aacba2008-12-15 03:52:17 +00002277#endif
2278
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002279 return Changed;
Owen Andersonab6ec2e2007-07-24 17:55:58 +00002280}
Nuno Lopese3127f32008-10-10 16:25:50 +00002281
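// The loop at the top of iterateOnFunction chains each block's value-number
// scope to the scope of its immediate dominator, so a lookup that misses the
// local table keeps climbing the dominator tree.  Minimal sketch of that
// shape; illustrative only, and 'Scope' / 'lookupAvailable' are made-up names
// rather than the pass's actual ValueNumberScope machinery:
#if 0
#include <map>

struct Scope {
  Scope *Parent;                     // scope of the immediate dominator
  std::map<unsigned, void*> Table;   // value number -> available value
  explicit Scope(Scope *P) : Parent(P) {}
};

static void *lookupAvailable(Scope *S, unsigned ValNo) {
  for (; S; S = S->Parent) {         // walk toward the dominator-tree root
    std::map<unsigned, void*>::iterator I = S->Table.find(ValNo);
    if (I != S->Table.end())
      return I->second;
  }
  return 0;                          // not available in any dominating block
}
#endif
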
2282void GVN::cleanupGlobalSets() {
2283 VN.clear();
Nuno Lopese3127f32008-10-10 16:25:50 +00002284
2285 for (DenseMap<BasicBlock*, ValueNumberScope*>::iterator
2286 I = localAvail.begin(), E = localAvail.end(); I != E; ++I)
2287 delete I->second;
2288 localAvail.clear();
2289}
Bill Wendling6b18a392008-12-22 21:36:08 +00002290
2291/// verifyRemoved - Verify that the specified instruction does not occur in our
2292/// internal data structures.
Bill Wendlinge7f08e72008-12-22 22:28:56 +00002293void GVN::verifyRemoved(const Instruction *Inst) const {
2294 VN.verifyRemoved(Inst);
Bill Wendling3c793442008-12-22 22:14:07 +00002295
Bill Wendlinge7f08e72008-12-22 22:28:56 +00002296  // Walk through the value number scopes to make sure the instruction isn't
 2297  // ferreted away in any of them.
Jeffrey Yasskinb40d3f72009-11-10 01:02:17 +00002298 for (DenseMap<BasicBlock*, ValueNumberScope*>::const_iterator
Bill Wendlinge7f08e72008-12-22 22:28:56 +00002299 I = localAvail.begin(), E = localAvail.end(); I != E; ++I) {
2300 const ValueNumberScope *VNS = I->second;
2301
2302 while (VNS) {
Jeffrey Yasskinb40d3f72009-11-10 01:02:17 +00002303 for (DenseMap<uint32_t, Value*>::const_iterator
Bill Wendlinge7f08e72008-12-22 22:28:56 +00002304 II = VNS->table.begin(), IE = VNS->table.end(); II != IE; ++II) {
2305 assert(II->second != Inst && "Inst still in value numbering scope!");
2306 }
2307
2308 VNS = VNS->parent;
Bill Wendling3c793442008-12-22 22:14:07 +00002309 }
2310 }
Bill Wendling6b18a392008-12-22 21:36:08 +00002311}