//===- GVN.cpp - Eliminate redundant values and loads --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs global value numbering to eliminate fully redundant
// instructions. It also performs simple dead load elimination.
//
// Note that this pass does the value numbering itself; it does not use the
// ValueNumbering analysis passes.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "gvn"
#include "llvm/Transforms/Scalar.h"
#include "llvm/BasicBlock.h"
#include "llvm/Constants.h"
#include "llvm/DerivedTypes.h"
#include "llvm/GlobalVariable.h"
#include "llvm/Function.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/LLVMContext.h"
#include "llvm/Operator.h"
#include "llvm/Value.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/Loads.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Analysis/PHITransAddr.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/GetElementPtrTypeIterator.h"
#include "llvm/Support/IRBuilder.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/SSAUpdater.h"
using namespace llvm;

STATISTIC(NumGVNInstr, "Number of instructions deleted");
STATISTIC(NumGVNLoad, "Number of loads deleted");
STATISTIC(NumGVNPRE, "Number of instructions PRE'd");
STATISTIC(NumGVNBlocks, "Number of blocks merged");
STATISTIC(NumPRELoad, "Number of loads PRE'd");

static cl::opt<bool> EnablePRE("enable-pre",
                               cl::init(true), cl::Hidden);
static cl::opt<bool> EnableLoadPRE("enable-load-pre", cl::init(true));

//===----------------------------------------------------------------------===//
//                         ValueTable Class
//===----------------------------------------------------------------------===//

/// This class holds the mapping between values and value numbers. It is used
/// as an efficient mechanism to determine the expression-wise equivalence of
/// two values.
namespace {
  struct Expression {
    enum ExpressionOpcode {
      ADD = Instruction::Add,
      FADD = Instruction::FAdd,
      SUB = Instruction::Sub,
      FSUB = Instruction::FSub,
      MUL = Instruction::Mul,
      FMUL = Instruction::FMul,
      UDIV = Instruction::UDiv,
      SDIV = Instruction::SDiv,
      FDIV = Instruction::FDiv,
      UREM = Instruction::URem,
      SREM = Instruction::SRem,
      FREM = Instruction::FRem,
      SHL = Instruction::Shl,
      LSHR = Instruction::LShr,
      ASHR = Instruction::AShr,
      AND = Instruction::And,
      OR = Instruction::Or,
      XOR = Instruction::Xor,
      TRUNC = Instruction::Trunc,
      ZEXT = Instruction::ZExt,
      SEXT = Instruction::SExt,
      FPTOUI = Instruction::FPToUI,
      FPTOSI = Instruction::FPToSI,
      UITOFP = Instruction::UIToFP,
      SITOFP = Instruction::SIToFP,
      FPTRUNC = Instruction::FPTrunc,
      FPEXT = Instruction::FPExt,
      PTRTOINT = Instruction::PtrToInt,
      INTTOPTR = Instruction::IntToPtr,
      BITCAST = Instruction::BitCast,
      ICMPEQ, ICMPNE, ICMPUGT, ICMPUGE, ICMPULT, ICMPULE,
      ICMPSGT, ICMPSGE, ICMPSLT, ICMPSLE, FCMPOEQ,
      FCMPOGT, FCMPOGE, FCMPOLT, FCMPOLE, FCMPONE,
      FCMPORD, FCMPUNO, FCMPUEQ, FCMPUGT, FCMPUGE,
      FCMPULT, FCMPULE, FCMPUNE, EXTRACT, INSERT,
      SHUFFLE, SELECT, GEP, CALL, CONSTANT,
      INSERTVALUE, EXTRACTVALUE, EMPTY, TOMBSTONE };

    ExpressionOpcode opcode;
    const Type* type;
    SmallVector<uint32_t, 4> varargs;
    Value *function;

    Expression() { }
    Expression(ExpressionOpcode o) : opcode(o) { }

    bool operator==(const Expression &other) const {
      if (opcode != other.opcode)
        return false;
      else if (opcode == EMPTY || opcode == TOMBSTONE)
        return true;
      else if (type != other.type)
        return false;
      else if (function != other.function)
        return false;
      else {
        if (varargs.size() != other.varargs.size())
          return false;

        for (size_t i = 0; i < varargs.size(); ++i)
          if (varargs[i] != other.varargs[i])
            return false;

        return true;
      }
    }

    /*bool operator!=(const Expression &other) const {
      return !(*this == other);
    }*/
  };

  class ValueTable {
    private:
      DenseMap<Value*, uint32_t> valueNumbering;
      DenseMap<Expression, uint32_t> expressionNumbering;
      AliasAnalysis* AA;
      MemoryDependenceAnalysis* MD;
      DominatorTree* DT;

      uint32_t nextValueNumber;

      Expression::ExpressionOpcode getOpcode(CmpInst* C);
      Expression create_expression(BinaryOperator* BO);
      Expression create_expression(CmpInst* C);
      Expression create_expression(ShuffleVectorInst* V);
      Expression create_expression(ExtractElementInst* C);
      Expression create_expression(InsertElementInst* V);
      Expression create_expression(SelectInst* V);
      Expression create_expression(CastInst* C);
      Expression create_expression(GetElementPtrInst* G);
      Expression create_expression(CallInst* C);
      Expression create_expression(ExtractValueInst* C);
      Expression create_expression(InsertValueInst* C);

      uint32_t lookup_or_add_call(CallInst* C);
    public:
      ValueTable() : nextValueNumber(1) { }
      uint32_t lookup_or_add(Value *V);
      uint32_t lookup(Value *V) const;
      void add(Value *V, uint32_t num);
      void clear();
      void erase(Value *v);
      void setAliasAnalysis(AliasAnalysis* A) { AA = A; }
      AliasAnalysis *getAliasAnalysis() const { return AA; }
      void setMemDep(MemoryDependenceAnalysis* M) { MD = M; }
      void setDomTree(DominatorTree* D) { DT = D; }
      uint32_t getNextUnusedValueNumber() { return nextValueNumber; }
      void verifyRemoved(const Value *) const;
  };
}
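
// A ValueTable client seeds the analyses with setAliasAnalysis/setMemDep/
// setDomTree and then calls lookup_or_add() on each value it visits; two
// values that receive the same number have been shown to compute equivalent
// expressions (for calls, subject to the memory-dependence checks below).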

namespace llvm {
template <> struct DenseMapInfo<Expression> {
  static inline Expression getEmptyKey() {
    return Expression(Expression::EMPTY);
  }

  static inline Expression getTombstoneKey() {
    return Expression(Expression::TOMBSTONE);
  }

  static unsigned getHashValue(const Expression e) {
    unsigned hash = e.opcode;

    hash = ((unsigned)((uintptr_t)e.type >> 4) ^
            (unsigned)((uintptr_t)e.type >> 9));

    for (SmallVector<uint32_t, 4>::const_iterator I = e.varargs.begin(),
         E = e.varargs.end(); I != E; ++I)
      hash = *I + hash * 37;

    hash = ((unsigned)((uintptr_t)e.function >> 4) ^
            (unsigned)((uintptr_t)e.function >> 9)) +
           hash * 37;

    return hash;
  }
  static bool isEqual(const Expression &LHS, const Expression &RHS) {
    return LHS == RHS;
  }
};

template <>
struct isPodLike<Expression> { static const bool value = true; };

}

//===----------------------------------------------------------------------===//
//                     ValueTable Internal Functions
//===----------------------------------------------------------------------===//

Expression::ExpressionOpcode ValueTable::getOpcode(CmpInst* C) {
  if (isa<ICmpInst>(C)) {
    switch (C->getPredicate()) {
    default:  // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case ICmpInst::ICMP_EQ:  return Expression::ICMPEQ;
    case ICmpInst::ICMP_NE:  return Expression::ICMPNE;
    case ICmpInst::ICMP_UGT: return Expression::ICMPUGT;
    case ICmpInst::ICMP_UGE: return Expression::ICMPUGE;
    case ICmpInst::ICMP_ULT: return Expression::ICMPULT;
    case ICmpInst::ICMP_ULE: return Expression::ICMPULE;
    case ICmpInst::ICMP_SGT: return Expression::ICMPSGT;
    case ICmpInst::ICMP_SGE: return Expression::ICMPSGE;
    case ICmpInst::ICMP_SLT: return Expression::ICMPSLT;
    case ICmpInst::ICMP_SLE: return Expression::ICMPSLE;
    }
  } else {
    switch (C->getPredicate()) {
    default: // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case FCmpInst::FCMP_OEQ: return Expression::FCMPOEQ;
    case FCmpInst::FCMP_OGT: return Expression::FCMPOGT;
    case FCmpInst::FCMP_OGE: return Expression::FCMPOGE;
    case FCmpInst::FCMP_OLT: return Expression::FCMPOLT;
    case FCmpInst::FCMP_OLE: return Expression::FCMPOLE;
    case FCmpInst::FCMP_ONE: return Expression::FCMPONE;
    case FCmpInst::FCMP_ORD: return Expression::FCMPORD;
    case FCmpInst::FCMP_UNO: return Expression::FCMPUNO;
    case FCmpInst::FCMP_UEQ: return Expression::FCMPUEQ;
    case FCmpInst::FCMP_UGT: return Expression::FCMPUGT;
    case FCmpInst::FCMP_UGE: return Expression::FCMPUGE;
    case FCmpInst::FCMP_ULT: return Expression::FCMPULT;
    case FCmpInst::FCMP_ULE: return Expression::FCMPULE;
    case FCmpInst::FCMP_UNE: return Expression::FCMPUNE;
    }
  }
}

Expression ValueTable::create_expression(CallInst* C) {
  Expression e;

  e.type = C->getType();
  e.function = C->getCalledFunction();
  e.opcode = Expression::CALL;

  CallSite CS(C);
  for (CallInst::op_iterator I = CS.arg_begin(), E = CS.arg_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(BinaryOperator* BO) {
  Expression e;
  e.varargs.push_back(lookup_or_add(BO->getOperand(0)));
  e.varargs.push_back(lookup_or_add(BO->getOperand(1)));
  e.function = 0;
  e.type = BO->getType();
  e.opcode = static_cast<Expression::ExpressionOpcode>(BO->getOpcode());

  return e;
}
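
// For example, for "%x = add i32 %a, %b" the resulting Expression has
// opcode == ADD, type == i32, and varargs holding the value numbers of
// %a and %b; two adds over equivalent operands therefore map to the same
// entry in expressionNumbering.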

Expression ValueTable::create_expression(CmpInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.varargs.push_back(lookup_or_add(C->getOperand(1)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = getOpcode(C);

  return e;
}

Expression ValueTable::create_expression(CastInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = static_cast<Expression::ExpressionOpcode>(C->getOpcode());

  return e;
}

Expression ValueTable::create_expression(ShuffleVectorInst* S) {
  Expression e;

  e.varargs.push_back(lookup_or_add(S->getOperand(0)));
  e.varargs.push_back(lookup_or_add(S->getOperand(1)));
  e.varargs.push_back(lookup_or_add(S->getOperand(2)));
  e.function = 0;
  e.type = S->getType();
  e.opcode = Expression::SHUFFLE;

  return e;
}

Expression ValueTable::create_expression(ExtractElementInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getOperand(0)));
  e.varargs.push_back(lookup_or_add(E->getOperand(1)));
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACT;

  return e;
}

Expression ValueTable::create_expression(InsertElementInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getOperand(0)));
  e.varargs.push_back(lookup_or_add(I->getOperand(1)));
  e.varargs.push_back(lookup_or_add(I->getOperand(2)));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::INSERT;

  return e;
}

Expression ValueTable::create_expression(SelectInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getCondition()));
  e.varargs.push_back(lookup_or_add(I->getTrueValue()));
  e.varargs.push_back(lookup_or_add(I->getFalseValue()));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::SELECT;

  return e;
}

Expression ValueTable::create_expression(GetElementPtrInst* G) {
  Expression e;

  e.varargs.push_back(lookup_or_add(G->getPointerOperand()));
  e.function = 0;
  e.type = G->getType();
  e.opcode = Expression::GEP;

  for (GetElementPtrInst::op_iterator I = G->idx_begin(), E = G->idx_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(ExtractValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  for (ExtractValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACTVALUE;

  return e;
}

Expression ValueTable::create_expression(InsertValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  e.varargs.push_back(lookup_or_add(E->getInsertedValueOperand()));
  for (InsertValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::INSERTVALUE;

  return e;
}

//===----------------------------------------------------------------------===//
//                     ValueTable External Functions
//===----------------------------------------------------------------------===//

/// add - Insert a value into the table with a specified value number.
void ValueTable::add(Value *V, uint32_t num) {
  valueNumbering.insert(std::make_pair(V, num));
}

uint32_t ValueTable::lookup_or_add_call(CallInst* C) {
  if (AA->doesNotAccessMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) e = nextValueNumber++;
    valueNumbering[C] = e;
    return e;
  } else if (AA->onlyReadsMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }
    if (!MD) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }

    MemDepResult local_dep = MD->getDependency(C);

    if (!local_dep.isDef() && !local_dep.isNonLocal()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (local_dep.isDef()) {
      CallInst* local_cdep = cast<CallInst>(local_dep.getInst());

      if (local_cdep->getNumArgOperands() != C->getNumArgOperands()) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }

      for (unsigned i = 0, e = C->getNumArgOperands(); i < e; ++i) {
        uint32_t c_vn = lookup_or_add(C->getArgOperand(i));
        uint32_t cd_vn = lookup_or_add(local_cdep->getArgOperand(i));
        if (c_vn != cd_vn) {
          valueNumbering[C] = nextValueNumber;
          return nextValueNumber++;
        }
      }

      uint32_t v = lookup_or_add(local_cdep);
      valueNumbering[C] = v;
      return v;
    }

    // Non-local case.
    const MemoryDependenceAnalysis::NonLocalDepInfo &deps =
      MD->getNonLocalCallDependency(CallSite(C));
    // FIXME: call/call dependencies for readonly calls should return def, not
    // clobber! Move the checking logic to MemDep!
    CallInst* cdep = 0;

    // Check to see if we have a single dominating call instruction that is
    // identical to C.
    for (unsigned i = 0, e = deps.size(); i != e; ++i) {
      const NonLocalDepEntry *I = &deps[i];
      // Ignore non-local dependencies.
      if (I->getResult().isNonLocal())
        continue;

      // We don't handle non-dependencies. If we already have a call, reject
      // instruction dependencies.
      if (I->getResult().isClobber() || cdep != 0) {
        cdep = 0;
        break;
      }

      CallInst *NonLocalDepCall = dyn_cast<CallInst>(I->getResult().getInst());
      // FIXME: All duplicated with non-local case.
      if (NonLocalDepCall && DT->properlyDominates(I->getBB(), C->getParent())){
        cdep = NonLocalDepCall;
        continue;
      }

      cdep = 0;
      break;
    }

    if (!cdep) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (cdep->getNumArgOperands() != C->getNumArgOperands()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }
    for (unsigned i = 0, e = C->getNumArgOperands(); i < e; ++i) {
      uint32_t c_vn = lookup_or_add(C->getArgOperand(i));
      uint32_t cd_vn = lookup_or_add(cdep->getArgOperand(i));
      if (c_vn != cd_vn) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }
    }

    uint32_t v = lookup_or_add(cdep);
    valueNumbering[C] = v;
    return v;

  } else {
    valueNumbering[C] = nextValueNumber;
    return nextValueNumber++;
  }
}
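
// In effect, two calls to a readnone function whose arguments have equal
// value numbers collapse to one number. Readonly calls additionally require
// a non-clobbering dependency (a local Def, or a single dominating non-local
// call) on an identical earlier call before they are given the same number.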

/// lookup_or_add - Returns the value number for the specified value, assigning
/// it a new number if it did not have one before.
uint32_t ValueTable::lookup_or_add(Value *V) {
  DenseMap<Value*, uint32_t>::iterator VI = valueNumbering.find(V);
  if (VI != valueNumbering.end())
    return VI->second;

  if (!isa<Instruction>(V)) {
    valueNumbering[V] = nextValueNumber;
    return nextValueNumber++;
  }

  Instruction* I = cast<Instruction>(V);
  Expression exp;
  switch (I->getOpcode()) {
    case Instruction::Call:
      return lookup_or_add_call(cast<CallInst>(I));
    case Instruction::Add:
    case Instruction::FAdd:
    case Instruction::Sub:
    case Instruction::FSub:
    case Instruction::Mul:
    case Instruction::FMul:
    case Instruction::UDiv:
    case Instruction::SDiv:
    case Instruction::FDiv:
    case Instruction::URem:
    case Instruction::SRem:
    case Instruction::FRem:
    case Instruction::Shl:
    case Instruction::LShr:
    case Instruction::AShr:
    case Instruction::And:
    case Instruction::Or :
    case Instruction::Xor:
      exp = create_expression(cast<BinaryOperator>(I));
      break;
    case Instruction::ICmp:
    case Instruction::FCmp:
      exp = create_expression(cast<CmpInst>(I));
      break;
    case Instruction::Trunc:
    case Instruction::ZExt:
    case Instruction::SExt:
    case Instruction::FPToUI:
    case Instruction::FPToSI:
    case Instruction::UIToFP:
    case Instruction::SIToFP:
    case Instruction::FPTrunc:
    case Instruction::FPExt:
    case Instruction::PtrToInt:
    case Instruction::IntToPtr:
    case Instruction::BitCast:
      exp = create_expression(cast<CastInst>(I));
      break;
    case Instruction::Select:
      exp = create_expression(cast<SelectInst>(I));
      break;
    case Instruction::ExtractElement:
      exp = create_expression(cast<ExtractElementInst>(I));
      break;
    case Instruction::InsertElement:
      exp = create_expression(cast<InsertElementInst>(I));
      break;
    case Instruction::ShuffleVector:
      exp = create_expression(cast<ShuffleVectorInst>(I));
      break;
    case Instruction::ExtractValue:
      exp = create_expression(cast<ExtractValueInst>(I));
      break;
    case Instruction::InsertValue:
      exp = create_expression(cast<InsertValueInst>(I));
      break;
    case Instruction::GetElementPtr:
      exp = create_expression(cast<GetElementPtrInst>(I));
      break;
    default:
      valueNumbering[V] = nextValueNumber;
      return nextValueNumber++;
  }

  uint32_t& e = expressionNumbering[exp];
  if (!e) e = nextValueNumber++;
  valueNumbering[V] = e;
  return e;
}

/// lookup - Returns the value number of the specified value. Fails if
/// the value has not yet been numbered.
uint32_t ValueTable::lookup(Value *V) const {
  DenseMap<Value*, uint32_t>::const_iterator VI = valueNumbering.find(V);
  assert(VI != valueNumbering.end() && "Value not numbered?");
  return VI->second;
}

/// clear - Remove all entries from the ValueTable.
void ValueTable::clear() {
  valueNumbering.clear();
  expressionNumbering.clear();
  nextValueNumber = 1;
}

/// erase - Remove a value from the value numbering.
void ValueTable::erase(Value *V) {
  valueNumbering.erase(V);
}

/// verifyRemoved - Verify that the value is removed from all internal data
/// structures.
void ValueTable::verifyRemoved(const Value *V) const {
  for (DenseMap<Value*, uint32_t>::const_iterator
         I = valueNumbering.begin(), E = valueNumbering.end(); I != E; ++I) {
    assert(I->first != V && "Inst still occurs in value numbering map!");
  }
}

//===----------------------------------------------------------------------===//
//                                GVN Pass
//===----------------------------------------------------------------------===//

namespace {
  struct ValueNumberScope {
    ValueNumberScope* parent;
    DenseMap<uint32_t, Value*> table;

    ValueNumberScope(ValueNumberScope* p) : parent(p) { }
  };
}

namespace {

  class GVN : public FunctionPass {
    bool runOnFunction(Function &F);
  public:
    static char ID; // Pass identification, replacement for typeid
    explicit GVN(bool noloads = false)
        : FunctionPass(ID), NoLoads(noloads), MD(0) {
      initializeGVNPass(*PassRegistry::getPassRegistry());
    }

  private:
    bool NoLoads;
    MemoryDependenceAnalysis *MD;
    DominatorTree *DT;
    const TargetData* TD;

    ValueTable VN;
    DenseMap<BasicBlock*, ValueNumberScope*> localAvail;

    // List of critical edges to be split between iterations.
    SmallVector<std::pair<TerminatorInst*, unsigned>, 4> toSplit;

    // This transformation requires dominator and postdominator info.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<DominatorTree>();
      if (!NoLoads)
        AU.addRequired<MemoryDependenceAnalysis>();
      AU.addRequired<AliasAnalysis>();

      AU.addPreserved<DominatorTree>();
      AU.addPreserved<AliasAnalysis>();
    }

    // Helper functions
    // FIXME: eliminate or document these better
    bool processLoad(LoadInst* L,
                     SmallVectorImpl<Instruction*> &toErase);
    bool processInstruction(Instruction *I,
                            SmallVectorImpl<Instruction*> &toErase);
    bool processNonLocalLoad(LoadInst* L,
                             SmallVectorImpl<Instruction*> &toErase);
    bool processBlock(BasicBlock *BB);
    void dump(DenseMap<uint32_t, Value*>& d);
    bool iterateOnFunction(Function &F);
    Value *CollapsePhi(PHINode* p);
    bool performPRE(Function& F);
    Value *lookupNumber(BasicBlock *BB, uint32_t num);
    void cleanupGlobalSets();
    void verifyRemoved(const Instruction *I) const;
    bool splitCriticalEdges();
  };

  char GVN::ID = 0;
}

// createGVNPass - The public interface to this file...
FunctionPass *llvm::createGVNPass(bool NoLoads) {
  return new GVN(NoLoads);
}

INITIALIZE_PASS_BEGIN(GVN, "gvn", "Global Value Numbering", false, false)
INITIALIZE_PASS_DEPENDENCY(MemoryDependenceAnalysis)
INITIALIZE_PASS_DEPENDENCY(DominatorTree)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_END(GVN, "gvn", "Global Value Numbering", false, false)

void GVN::dump(DenseMap<uint32_t, Value*>& d) {
  errs() << "{\n";
  for (DenseMap<uint32_t, Value*>::iterator I = d.begin(),
       E = d.end(); I != E; ++I) {
    errs() << I->first << "\n";
    I->second->dump();
  }
  errs() << "}\n";
}

static bool isSafeReplacement(PHINode* p, Instruction *inst) {
  if (!isa<PHINode>(inst))
    return true;

  for (Instruction::use_iterator UI = p->use_begin(), E = p->use_end();
       UI != E; ++UI)
    if (PHINode* use_phi = dyn_cast<PHINode>(*UI))
      if (use_phi->getParent() == inst->getParent())
        return false;

  return true;
}

Value *GVN::CollapsePhi(PHINode *PN) {
  Value *ConstVal = PN->hasConstantValue(DT);
  if (!ConstVal) return 0;

  Instruction *Inst = dyn_cast<Instruction>(ConstVal);
  if (!Inst)
    return ConstVal;

  if (DT->dominates(Inst, PN))
    if (isSafeReplacement(PN, Inst))
      return Inst;
  return 0;
}

/// IsValueFullyAvailableInBlock - Return true if we can prove that the value
/// we're analyzing is fully available in the specified block. As we go, keep
/// track of which blocks we know are fully alive in FullyAvailableBlocks. This
/// map is actually a tri-state map with the following values:
/// 0) we know the block *is not* fully available.
/// 1) we know the block *is* fully available.
/// 2) we do not know whether the block is fully available or not, but we are
///    currently speculating that it will be.
/// 3) we are speculating for this block and have used that to speculate for
///    other blocks.
static bool IsValueFullyAvailableInBlock(BasicBlock *BB,
                            DenseMap<BasicBlock*, char> &FullyAvailableBlocks) {
  // Optimistically assume that the block is fully available and check to see
  // if we already know about this block in one lookup.
  std::pair<DenseMap<BasicBlock*, char>::iterator, char> IV =
    FullyAvailableBlocks.insert(std::make_pair(BB, 2));

  // If the entry already existed for this block, return the precomputed value.
  if (!IV.second) {
    // If this is a speculative "available" value, mark it as being used for
    // speculation of other blocks.
    if (IV.first->second == 2)
      IV.first->second = 3;
    return IV.first->second != 0;
  }

  // Otherwise, see if it is fully available in all predecessors.
  pred_iterator PI = pred_begin(BB), PE = pred_end(BB);

  // If this block has no predecessors, it isn't live-in here.
  if (PI == PE)
    goto SpeculationFailure;

  for (; PI != PE; ++PI)
    // If the value isn't fully available in one of our predecessors, then it
    // isn't fully available in this block either. Undo our previous
    // optimistic assumption and bail out.
    if (!IsValueFullyAvailableInBlock(*PI, FullyAvailableBlocks))
      goto SpeculationFailure;

  return true;

// SpeculationFailure - If we get here, we found out that this is not, after
// all, a fully-available block. We have a problem if we speculated on this and
// used the speculation to mark other blocks as available.
SpeculationFailure:
  char &BBVal = FullyAvailableBlocks[BB];

  // If we didn't speculate on this, just return with it set to false.
  if (BBVal == 2) {
    BBVal = 0;
    return false;
  }

  // If we did speculate on this value, we could have blocks set to 1 that are
  // incorrect. Walk the (transitive) successors of this block and mark them as
  // 0 if set to one.
  SmallVector<BasicBlock*, 32> BBWorklist;
  BBWorklist.push_back(BB);

  do {
    BasicBlock *Entry = BBWorklist.pop_back_val();
    // Note that this sets blocks to 0 (unavailable) if they happen to not
    // already be in FullyAvailableBlocks. This is safe.
    char &EntryVal = FullyAvailableBlocks[Entry];
    if (EntryVal == 0) continue; // Already unavailable.

    // Mark as unavailable.
    EntryVal = 0;

    for (succ_iterator I = succ_begin(Entry), E = succ_end(Entry); I != E; ++I)
      BBWorklist.push_back(*I);
  } while (!BBWorklist.empty());

  return false;
}
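
// The optimistic "2" entry is what lets the recursion terminate on cycles:
// when a block is reached again through one of its own (transitive)
// predecessors, the lookup hits the speculative entry instead of recursing
// forever, and SpeculationFailure cleans up if the guess turns out wrong.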


/// CanCoerceMustAliasedValueToLoad - Return true if
/// CoerceAvailableValueToLoadType will succeed.
static bool CanCoerceMustAliasedValueToLoad(Value *StoredVal,
                                            const Type *LoadTy,
                                            const TargetData &TD) {
  // If the loaded or stored value is a first class array or struct, don't try
  // to transform them. We need to be able to bitcast to integer.
  if (LoadTy->isStructTy() || LoadTy->isArrayTy() ||
      StoredVal->getType()->isStructTy() ||
      StoredVal->getType()->isArrayTy())
    return false;

  // The store has to be at least as big as the load.
  if (TD.getTypeSizeInBits(StoredVal->getType()) <
      TD.getTypeSizeInBits(LoadTy))
    return false;

  return true;
}


/// CoerceAvailableValueToLoadType - If we saw a store of a value to memory, and
/// then a load from a must-aliased pointer of a different type, try to coerce
/// the stored value. LoadedTy is the type of the load we want to replace and
/// InsertPt is the place to insert new instructions.
///
/// If we can't do it, return null.
static Value *CoerceAvailableValueToLoadType(Value *StoredVal,
                                             const Type *LoadedTy,
                                             Instruction *InsertPt,
                                             const TargetData &TD) {
  if (!CanCoerceMustAliasedValueToLoad(StoredVal, LoadedTy, TD))
    return 0;

  const Type *StoredValTy = StoredVal->getType();

  uint64_t StoreSize = TD.getTypeStoreSizeInBits(StoredValTy);
  uint64_t LoadSize = TD.getTypeSizeInBits(LoadedTy);

  // If the store and reload are the same size, we can always reuse it.
  if (StoreSize == LoadSize) {
    if (StoredValTy->isPointerTy() && LoadedTy->isPointerTy()) {
      // Pointer to Pointer -> use bitcast.
      return new BitCastInst(StoredVal, LoadedTy, "", InsertPt);
    }

    // Convert source pointers to integers, which can be bitcast.
    if (StoredValTy->isPointerTy()) {
      StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
      StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
    }

    const Type *TypeToCastTo = LoadedTy;
    if (TypeToCastTo->isPointerTy())
      TypeToCastTo = TD.getIntPtrType(StoredValTy->getContext());

    if (StoredValTy != TypeToCastTo)
      StoredVal = new BitCastInst(StoredVal, TypeToCastTo, "", InsertPt);

    // Cast to pointer if the load needs a pointer type.
    if (LoadedTy->isPointerTy())
      StoredVal = new IntToPtrInst(StoredVal, LoadedTy, "", InsertPt);

    return StoredVal;
  }

  // If the loaded value is smaller than the available value, then we can
  // extract out a piece from it. If the available value is too small, then we
  // can't do anything.
  assert(StoreSize >= LoadSize && "CanCoerceMustAliasedValueToLoad fail");

  // Convert source pointers to integers, which can be manipulated.
  if (StoredValTy->isPointerTy()) {
    StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
    StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
  }

  // Convert vectors and fp to integer, which can be manipulated.
  if (!StoredValTy->isIntegerTy()) {
    StoredValTy = IntegerType::get(StoredValTy->getContext(), StoreSize);
    StoredVal = new BitCastInst(StoredVal, StoredValTy, "", InsertPt);
  }

  // If this is a big-endian system, we need to shift the value down to the low
  // bits so that a truncate will work.
  if (TD.isBigEndian()) {
    Constant *Val = ConstantInt::get(StoredVal->getType(), StoreSize-LoadSize);
    StoredVal = BinaryOperator::CreateLShr(StoredVal, Val, "tmp", InsertPt);
  }

  // Truncate the integer to the right size now.
  const Type *NewIntTy = IntegerType::get(StoredValTy->getContext(), LoadSize);
  StoredVal = new TruncInst(StoredVal, NewIntTy, "trunc", InsertPt);

  if (LoadedTy == NewIntTy)
    return StoredVal;

  // If the result is a pointer, inttoptr.
  if (LoadedTy->isPointerTy())
    return new IntToPtrInst(StoredVal, LoadedTy, "inttoptr", InsertPt);

  // Otherwise, bitcast.
  return new BitCastInst(StoredVal, LoadedTy, "bitcast", InsertPt);
}
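
// For example, forwarding a store of i32 0xAABBCCDD to an i16 load of the
// same address yields 0xCCDD on a little-endian target (plain truncate) and
// 0xAABB on a big-endian one (lshr by 16, then truncate).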

/// GetBaseWithConstantOffset - Analyze the specified pointer to see if it can
/// be expressed as a base pointer plus a constant offset. Return the base and
/// offset to the caller.
static Value *GetBaseWithConstantOffset(Value *Ptr, int64_t &Offset,
                                        const TargetData &TD) {
  Operator *PtrOp = dyn_cast<Operator>(Ptr);
  if (PtrOp == 0) return Ptr;

  // Just look through bitcasts.
  if (PtrOp->getOpcode() == Instruction::BitCast)
    return GetBaseWithConstantOffset(PtrOp->getOperand(0), Offset, TD);

  // If this is a GEP with constant indices, we can look through it.
  GEPOperator *GEP = dyn_cast<GEPOperator>(PtrOp);
  if (GEP == 0 || !GEP->hasAllConstantIndices()) return Ptr;

  gep_type_iterator GTI = gep_type_begin(GEP);
  for (User::op_iterator I = GEP->idx_begin(), E = GEP->idx_end(); I != E;
       ++I, ++GTI) {
    ConstantInt *OpC = cast<ConstantInt>(*I);
    if (OpC->isZero()) continue;

    // Handle struct and array indices, which add their offset to the pointer.
    if (const StructType *STy = dyn_cast<StructType>(*GTI)) {
      Offset += TD.getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
    } else {
      uint64_t Size = TD.getTypeAllocSize(GTI.getIndexedType());
      Offset += OpC->getSExtValue()*Size;
    }
  }

  // Re-sign extend from the pointer size if needed to get overflow edge cases
  // right.
  unsigned PtrSize = TD.getPointerSizeInBits();
  if (PtrSize < 64)
    Offset = (Offset << (64-PtrSize)) >> (64-PtrSize);

  return GetBaseWithConstantOffset(GEP->getPointerOperand(), Offset, TD);
}
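
// For example, for "getelementptr i8* %base, i64 12" this accumulates
// Offset += 12 and returns %base; nested GEPs and bitcasts are peeled the
// same way, so the caller ends up with the outermost base and a single
// byte offset.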


/// AnalyzeLoadFromClobberingWrite - This function is called when we have a
/// memdep query of a load that ends up being a clobbering memory write (store,
/// memset, memcpy, memmove). This means that the write *may* provide bits used
/// by the load but we can't be sure because the pointers don't mustalias.
///
/// Check this case to see if there is anything more we can do before we give
/// up. This returns -1 if we have to give up, or a byte number in the stored
/// value of the piece that feeds the load.
static int AnalyzeLoadFromClobberingWrite(const Type *LoadTy, Value *LoadPtr,
                                          Value *WritePtr,
                                          uint64_t WriteSizeInBits,
                                          const TargetData &TD) {
  // If the loaded or stored value is a first class array or struct, don't try
  // to transform them. We need to be able to bitcast to integer.
  if (LoadTy->isStructTy() || LoadTy->isArrayTy())
    return -1;

  int64_t StoreOffset = 0, LoadOffset = 0;
  Value *StoreBase = GetBaseWithConstantOffset(WritePtr, StoreOffset, TD);
  Value *LoadBase =
    GetBaseWithConstantOffset(LoadPtr, LoadOffset, TD);
  if (StoreBase != LoadBase)
    return -1;

  // If the load and store are to the exact same address, they should have been
  // a must alias. AA must have gotten confused.
  // FIXME: Study to see if/when this happens. One case is forwarding a memset
  // to a load from the base of the memset.
#if 0
  if (LoadOffset == StoreOffset) {
    dbgs() << "STORE/LOAD DEP WITH COMMON POINTER MISSED:\n"
           << "Base = " << *StoreBase << "\n"
           << "Store Ptr = " << *WritePtr << "\n"
           << "Store Offs = " << StoreOffset << "\n"
           << "Load Ptr = " << *LoadPtr << "\n";
    abort();
  }
#endif

  // If the load and store don't overlap at all, the store doesn't provide
  // anything to the load. In this case, they really don't alias at all, AA
  // must have gotten confused.
  // FIXME: Investigate cases where this bails out, e.g. rdar://7238614. Then
  // remove this check, as it is duplicated with what we have below.
  uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy);

  if ((WriteSizeInBits & 7) | (LoadSize & 7))
    return -1;
  uint64_t StoreSize = WriteSizeInBits >> 3; // Convert to bytes.
  LoadSize >>= 3;


  bool isAAFailure = false;
  if (StoreOffset < LoadOffset)
    isAAFailure = StoreOffset+int64_t(StoreSize) <= LoadOffset;
  else
    isAAFailure = LoadOffset+int64_t(LoadSize) <= StoreOffset;

  if (isAAFailure) {
#if 0
    dbgs() << "STORE LOAD DEP WITH COMMON BASE:\n"
           << "Base = " << *StoreBase << "\n"
           << "Store Ptr = " << *WritePtr << "\n"
           << "Store Offs = " << StoreOffset << "\n"
           << "Load Ptr = " << *LoadPtr << "\n";
    abort();
#endif
    return -1;
  }

  // If the Load isn't completely contained within the stored bits, we don't
  // have all the bits to feed it. We could do something crazy in the future
  // (issue a smaller load then merge the bits in) but this seems unlikely to be
  // valuable.
  if (StoreOffset > LoadOffset ||
      StoreOffset+StoreSize < LoadOffset+LoadSize)
    return -1;

  // Okay, we can do this transformation. Return the number of bytes into the
  // store that the load is.
  return LoadOffset-StoreOffset;
}
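
// For example, a load of i16 from P+4 whose clobbering write is a store of
// i64 to P is answered with 4: the load's bytes live four bytes into the
// stored value, and GetStoreValueForLoad below carves them back out.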
1069
Chris Lattner42376062009-12-06 01:57:02 +00001070/// AnalyzeLoadFromClobberingStore - This function is called when we have a
1071/// memdep query of a load that ends up being a clobbering store.
Chris Lattner07df9ef2009-12-09 07:37:07 +00001072static int AnalyzeLoadFromClobberingStore(const Type *LoadTy, Value *LoadPtr,
1073 StoreInst *DepSI,
Chris Lattner42376062009-12-06 01:57:02 +00001074 const TargetData &TD) {
1075 // Cannot handle reading from store of first-class aggregate yet.
Dan Gohmand2099112010-11-10 19:03:33 +00001076 if (DepSI->getValueOperand()->getType()->isStructTy() ||
1077 DepSI->getValueOperand()->getType()->isArrayTy())
Chris Lattner42376062009-12-06 01:57:02 +00001078 return -1;
1079
1080 Value *StorePtr = DepSI->getPointerOperand();
Dan Gohmand2099112010-11-10 19:03:33 +00001081 uint64_t StoreSize =TD.getTypeSizeInBits(DepSI->getValueOperand()->getType());
Chris Lattner07df9ef2009-12-09 07:37:07 +00001082 return AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr,
Chris Lattner0def8612009-12-09 07:34:10 +00001083 StorePtr, StoreSize, TD);
Chris Lattner42376062009-12-06 01:57:02 +00001084}
1085
Chris Lattner07df9ef2009-12-09 07:37:07 +00001086static int AnalyzeLoadFromClobberingMemInst(const Type *LoadTy, Value *LoadPtr,
1087 MemIntrinsic *MI,
Chris Lattner42376062009-12-06 01:57:02 +00001088 const TargetData &TD) {
1089 // If the mem operation is a non-constant size, we can't handle it.
1090 ConstantInt *SizeCst = dyn_cast<ConstantInt>(MI->getLength());
1091 if (SizeCst == 0) return -1;
1092 uint64_t MemSizeInBits = SizeCst->getZExtValue()*8;
Chris Lattner778cb922009-12-06 05:29:56 +00001093
1094 // If this is memset, we just need to see if the offset is valid in the size
1095 // of the memset..
Chris Lattner42376062009-12-06 01:57:02 +00001096 if (MI->getIntrinsicID() == Intrinsic::memset)
Chris Lattner07df9ef2009-12-09 07:37:07 +00001097 return AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr, MI->getDest(),
1098 MemSizeInBits, TD);
Chris Lattner42376062009-12-06 01:57:02 +00001099
Chris Lattner778cb922009-12-06 05:29:56 +00001100 // If we have a memcpy/memmove, the only case we can handle is if this is a
1101 // copy from constant memory. In that case, we can read directly from the
1102 // constant memory.
1103 MemTransferInst *MTI = cast<MemTransferInst>(MI);
1104
1105 Constant *Src = dyn_cast<Constant>(MTI->getSource());
1106 if (Src == 0) return -1;
1107
1108 GlobalVariable *GV = dyn_cast<GlobalVariable>(Src->getUnderlyingObject());
1109 if (GV == 0 || !GV->isConstant()) return -1;
1110
1111 // See if the access is within the bounds of the transfer.
Chris Lattner07df9ef2009-12-09 07:37:07 +00001112 int Offset = AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr,
1113 MI->getDest(), MemSizeInBits, TD);
Chris Lattner778cb922009-12-06 05:29:56 +00001114 if (Offset == -1)
1115 return Offset;
1116
1117 // Otherwise, see if we can constant fold a load from the constant with the
1118 // offset applied as appropriate.
1119 Src = ConstantExpr::getBitCast(Src,
1120 llvm::Type::getInt8PtrTy(Src->getContext()));
1121 Constant *OffsetCst =
1122 ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
1123 Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
Chris Lattner07df9ef2009-12-09 07:37:07 +00001124 Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(LoadTy));
Chris Lattner778cb922009-12-06 05:29:56 +00001125 if (ConstantFoldLoadFromConstPtr(Src, &TD))
1126 return Offset;
Chris Lattner42376062009-12-06 01:57:02 +00001127 return -1;
1128}
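// A sketch of the cases handled above (illustrative, hypothetical IR):
//   call void @llvm.memset...(i8* %p, i8 0, i64 16, ...)
//   %v = load i32* %q          ; %q is known to be %p plus 4 bytes
// The load reads bytes [4,8) of the 16 bytes written by the memset, so this
// returns 4.  For a memcpy/memmove the same offset is returned only when the
// source is a constant GlobalVariable and a load from it at that offset
// constant-folds.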
1129
Chris Lattnerd28f9082009-09-21 06:24:16 +00001130
1131/// GetStoreValueForLoad - This function is called when we have a
1132/// memdep query of a load whose value is provided by a clobbering store that
1133/// covers all of the bits the load needs.  Extract those bits from SrcVal at
1134/// the given byte Offset, accounting for endianness, and coerce the result to
1135/// LoadTy.
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001136static Value *GetStoreValueForLoad(Value *SrcVal, unsigned Offset,
1137 const Type *LoadTy,
1138 Instruction *InsertPt, const TargetData &TD){
Chris Lattnerd28f9082009-09-21 06:24:16 +00001139 LLVMContext &Ctx = SrcVal->getType()->getContext();
1140
Chris Lattner5a62d6e2010-05-08 20:01:44 +00001141 uint64_t StoreSize = (TD.getTypeSizeInBits(SrcVal->getType()) + 7) / 8;
1142 uint64_t LoadSize = (TD.getTypeSizeInBits(LoadTy) + 7) / 8;
Chris Lattnerd28f9082009-09-21 06:24:16 +00001143
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001144 IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
Chris Lattnerd28f9082009-09-21 06:24:16 +00001145
1146 // Compute which bits of the stored value are being used by the load. Convert
1147 // to an integer type to start with.
Duncan Sands19d0b472010-02-16 11:11:14 +00001148 if (SrcVal->getType()->isPointerTy())
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001149 SrcVal = Builder.CreatePtrToInt(SrcVal, TD.getIntPtrType(Ctx), "tmp");
Duncan Sands19d0b472010-02-16 11:11:14 +00001150 if (!SrcVal->getType()->isIntegerTy())
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001151 SrcVal = Builder.CreateBitCast(SrcVal, IntegerType::get(Ctx, StoreSize*8),
1152 "tmp");
Chris Lattnerd28f9082009-09-21 06:24:16 +00001153
1154 // Shift the bits to the least significant position, depending on endianness.
1155 unsigned ShiftAmt;
Chris Lattner42376062009-12-06 01:57:02 +00001156 if (TD.isLittleEndian())
Chris Lattnerd28f9082009-09-21 06:24:16 +00001157 ShiftAmt = Offset*8;
Chris Lattner42376062009-12-06 01:57:02 +00001158 else
Chris Lattner24705382009-09-21 17:55:47 +00001159 ShiftAmt = (StoreSize-LoadSize-Offset)*8;
Chris Lattnerd28f9082009-09-21 06:24:16 +00001160
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001161 if (ShiftAmt)
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001162 SrcVal = Builder.CreateLShr(SrcVal, ShiftAmt, "tmp");
Chris Lattnerd28f9082009-09-21 06:24:16 +00001163
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001164 if (LoadSize != StoreSize)
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001165 SrcVal = Builder.CreateTrunc(SrcVal, IntegerType::get(Ctx, LoadSize*8),
1166 "tmp");
Chris Lattnerd28f9082009-09-21 06:24:16 +00001167
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001168 return CoerceAvailableValueToLoadType(SrcVal, LoadTy, InsertPt, TD);
Chris Lattnerd28f9082009-09-21 06:24:16 +00001169}
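// A worked example (illustrative): for a 4-byte stored value, a 1-byte load at
// Offset 2, and a little-endian target, ShiftAmt is 2*8 = 16, so the emitted
// code is roughly "trunc (lshr SrcVal, 16) to i8".  On a big-endian target the
// shift amount would instead be (4-1-2)*8 = 8.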
1170
Chris Lattner42376062009-12-06 01:57:02 +00001171/// GetMemInstValueForLoad - This function is called when we have a
1172/// memdep query of a load that ends up being a clobbering mem intrinsic.
1173static Value *GetMemInstValueForLoad(MemIntrinsic *SrcInst, unsigned Offset,
1174 const Type *LoadTy, Instruction *InsertPt,
1175 const TargetData &TD){
1176 LLVMContext &Ctx = LoadTy->getContext();
1177 uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy)/8;
1178
1179 IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
1180
1181 // We know that this method is only called when the memory intrinsic fully
1182 // provides the bits for the load.
1183 if (MemSetInst *MSI = dyn_cast<MemSetInst>(SrcInst)) {
1184 // memset(P, 'x', 1234) -> splat('x'), even if x is a variable, and
1185 // independently of what the offset is.
1186 Value *Val = MSI->getValue();
1187 if (LoadSize != 1)
1188 Val = Builder.CreateZExt(Val, IntegerType::get(Ctx, LoadSize*8));
1189
1190 Value *OneElt = Val;
1191
1192 // Splat the value out to the right number of bits.
1193 for (unsigned NumBytesSet = 1; NumBytesSet != LoadSize; ) {
1194 // If we can double the number of bytes set, do it.
1195 if (NumBytesSet*2 <= LoadSize) {
1196 Value *ShVal = Builder.CreateShl(Val, NumBytesSet*8);
1197 Val = Builder.CreateOr(Val, ShVal);
1198 NumBytesSet <<= 1;
1199 continue;
1200 }
1201
1202 // Otherwise insert one byte at a time.
1203 Value *ShVal = Builder.CreateShl(Val, 1*8);
1204 Val = Builder.CreateOr(OneElt, ShVal);
1205 ++NumBytesSet;
1206 }
1207
1208 return CoerceAvailableValueToLoadType(Val, LoadTy, InsertPt, TD);
1209 }
Chris Lattner778cb922009-12-06 05:29:56 +00001210
1211 // Otherwise, this is a memcpy/memmove from a constant global.
1212 MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);
1213 Constant *Src = cast<Constant>(MTI->getSource());
1214
1215 // See if we can constant fold a load from the constant with the offset
1216 // applied as appropriate.
1217 Src = ConstantExpr::getBitCast(Src,
1218 llvm::Type::getInt8PtrTy(Src->getContext()));
1219 Constant *OffsetCst =
1220 ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
1221 Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
1222 Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(LoadTy));
1223 return ConstantFoldLoadFromConstPtr(Src, &TD);
Chris Lattner42376062009-12-06 01:57:02 +00001224}
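// A worked example of the memset splat loop above (illustrative): for a memset
// of byte %x feeding a 4-byte load, %x is first zext'd to i32 and then the set
// bytes are doubled on each iteration:
//   Val = Val | (Val << 8)     ; NumBytesSet 1 -> 2, bytes 0..1 hold %x
//   Val = Val | (Val << 16)    ; NumBytesSet 2 -> 4, bytes 0..3 hold %x
// yielding %x replicated into every byte of the loaded value.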
1225
Dan Gohmanb29cda92010-04-15 17:08:50 +00001226namespace {
Chris Lattner42376062009-12-06 01:57:02 +00001227
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001228struct AvailableValueInBlock {
1229 /// BB - The basic block in question.
1230 BasicBlock *BB;
Chris Lattner93236ba2009-12-06 04:54:31 +00001231 enum ValType {
1232 SimpleVal, // A simple offsetted value that is accessed.
1233 MemIntrin // A memory intrinsic which is loaded from.
1234 };
1235
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001236 /// V - The value that is live out of the block.
Chris Lattner93236ba2009-12-06 04:54:31 +00001237 PointerIntPair<Value *, 1, ValType> Val;
1238
1239 /// Offset - The byte offset in Val that is interesting for the load query.
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001240 unsigned Offset;
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001241
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001242 static AvailableValueInBlock get(BasicBlock *BB, Value *V,
1243 unsigned Offset = 0) {
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001244 AvailableValueInBlock Res;
1245 Res.BB = BB;
Chris Lattner93236ba2009-12-06 04:54:31 +00001246 Res.Val.setPointer(V);
1247 Res.Val.setInt(SimpleVal);
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001248 Res.Offset = Offset;
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001249 return Res;
1250 }
Chris Lattner93236ba2009-12-06 04:54:31 +00001251
1252 static AvailableValueInBlock getMI(BasicBlock *BB, MemIntrinsic *MI,
1253 unsigned Offset = 0) {
1254 AvailableValueInBlock Res;
1255 Res.BB = BB;
1256 Res.Val.setPointer(MI);
1257 Res.Val.setInt(MemIntrin);
1258 Res.Offset = Offset;
1259 return Res;
1260 }
1261
1262 bool isSimpleValue() const { return Val.getInt() == SimpleVal; }
1263 Value *getSimpleValue() const {
1264 assert(isSimpleValue() && "Wrong accessor");
1265 return Val.getPointer();
1266 }
1267
1268 MemIntrinsic *getMemIntrinValue() const {
1269 assert(!isSimpleValue() && "Wrong accessor");
1270 return cast<MemIntrinsic>(Val.getPointer());
1271 }
Chris Lattner927b0ac2009-12-21 23:04:33 +00001272
1273 /// MaterializeAdjustedValue - Emit code into this block to adjust the value
1274 /// defined here to the specified type. This handles various coercion cases.
1275 Value *MaterializeAdjustedValue(const Type *LoadTy,
1276 const TargetData *TD) const {
1277 Value *Res;
1278 if (isSimpleValue()) {
1279 Res = getSimpleValue();
1280 if (Res->getType() != LoadTy) {
1281 assert(TD && "Need target data to handle type mismatch case");
1282 Res = GetStoreValueForLoad(Res, Offset, LoadTy, BB->getTerminator(),
1283 *TD);
1284
1285 DEBUG(errs() << "GVN COERCED NONLOCAL VAL:\nOffset: " << Offset << " "
1286 << *getSimpleValue() << '\n'
1287 << *Res << '\n' << "\n\n\n");
1288 }
1289 } else {
1290 Res = GetMemInstValueForLoad(getMemIntrinValue(), Offset,
1291 LoadTy, BB->getTerminator(), *TD);
1292 DEBUG(errs() << "GVN COERCED NONLOCAL MEM INTRIN:\nOffset: " << Offset
1293 << " " << *getMemIntrinValue() << '\n'
1294 << *Res << '\n' << "\n\n\n");
1295 }
1296 return Res;
1297 }
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001298};
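// The following is an illustrative (hypothetical) usage sketch of this struct,
// mirroring what the load elimination code later in this file does; BB,
// StoredVal, Ctx and TD are assumed to be in scope.
#if 0
  AvailableValueInBlock AV = AvailableValueInBlock::get(BB, StoredVal, /*Offset=*/2);
  Value *ForLoad = AV.MaterializeAdjustedValue(Type::getInt8Ty(Ctx), TD);
#endif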
1299
Dan Gohmanb29cda92010-04-15 17:08:50 +00001300}
1301
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001302/// ConstructSSAForLoadSet - Given the set of available values in ValuesPerBlock,
1303/// construct SSA form, allowing us to eliminate LI. This returns the value
1304/// that should be used at LI's definition site.
1305static Value *ConstructSSAForLoadSet(LoadInst *LI,
1306 SmallVectorImpl<AvailableValueInBlock> &ValuesPerBlock,
1307 const TargetData *TD,
Chris Lattnerbf200182009-12-21 23:15:48 +00001308 const DominatorTree &DT,
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001309 AliasAnalysis *AA) {
Chris Lattnerbf200182009-12-21 23:15:48 +00001310 // Check for the fully redundant, dominating load case. In this case, we can
1311 // just use the dominating value directly.
1312 if (ValuesPerBlock.size() == 1 &&
1313 DT.properlyDominates(ValuesPerBlock[0].BB, LI->getParent()))
1314 return ValuesPerBlock[0].MaterializeAdjustedValue(LI->getType(), TD);
1315
1316 // Otherwise, we have to construct SSA form.
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001317 SmallVector<PHINode*, 8> NewPHIs;
1318 SSAUpdater SSAUpdate(&NewPHIs);
Duncan Sands67781492010-09-02 08:14:03 +00001319 SSAUpdate.Initialize(LI->getType(), LI->getName());
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001320
1321 const Type *LoadTy = LI->getType();
1322
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001323 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
Chris Lattner93236ba2009-12-06 04:54:31 +00001324 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1325 BasicBlock *BB = AV.BB;
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001326
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001327 if (SSAUpdate.HasValueForBlock(BB))
1328 continue;
Chris Lattner93236ba2009-12-06 04:54:31 +00001329
Chris Lattner927b0ac2009-12-21 23:04:33 +00001330 SSAUpdate.AddAvailableValue(BB, AV.MaterializeAdjustedValue(LoadTy, TD));
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001331 }
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001332
1333 // Perform PHI construction.
1334 Value *V = SSAUpdate.GetValueInMiddleOfBlock(LI->getParent());
1335
1336 // If new PHI nodes were created, notify alias analysis.
Duncan Sands19d0b472010-02-16 11:11:14 +00001337 if (V->getType()->isPointerTy())
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001338 for (unsigned i = 0, e = NewPHIs.size(); i != e; ++i)
1339 AA->copyValue(LI, NewPHIs[i]);
1340
1341 return V;
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001342}
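// An illustrative example (hypothetical IR): if the loaded value is available
// as %a at the end of %pred1 and as %b at the end of %pred2, the SSAUpdater
// produces in the load's block something like
//   %merged = phi i32 [ %a, %pred1 ], [ %b, %pred2 ]
// and that phi is what the caller uses to replace the load.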
1343
Gabor Greifce6dd882010-04-09 10:57:00 +00001344static bool isLifetimeStart(const Instruction *Inst) {
1345 if (const IntrinsicInst* II = dyn_cast<IntrinsicInst>(Inst))
Owen Andersonb9878ee2009-12-02 07:35:19 +00001346 return II->getIntrinsicID() == Intrinsic::lifetime_start;
Chris Lattnerc4680252009-12-02 06:44:58 +00001347 return false;
1348}
1349
Owen Anderson221a4362007-08-16 22:02:55 +00001350/// processNonLocalLoad - Attempt to eliminate a load whose dependencies are
1351/// non-local by performing PHI construction.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001352bool GVN::processNonLocalLoad(LoadInst *LI,
Chris Lattner804209d2008-03-21 22:01:16 +00001353 SmallVectorImpl<Instruction*> &toErase) {
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001354 // Find the non-local dependencies of the load.
Chris Lattner9b7d99e2009-12-22 04:25:02 +00001355 SmallVector<NonLocalDepResult, 64> Deps;
Dan Gohman65316d62010-11-11 21:50:19 +00001356 AliasAnalysis::Location Loc = VN.getAliasAnalysis()->getLocation(LI);
1357 MD->getNonLocalPointerDependency(Loc, true, LI->getParent(), Deps);
David Greene2e6efc42010-01-05 01:27:17 +00001358 //DEBUG(dbgs() << "INVESTIGATING NONLOCAL LOAD: "
Dan Gohmanef3ef7f2009-07-31 20:24:18 +00001359 // << Deps.size() << *LI << '\n');
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001360
Owen Andersonb39e0de2008-08-26 22:07:42 +00001361 // If we had to process more than one hundred blocks to find the
1362 // dependencies, this load isn't worth worrying about. Optimizing
1363 // it will be too expensive.
Chris Lattnerb6fc4b82008-12-09 19:25:07 +00001364 if (Deps.size() > 100)
Owen Andersonb39e0de2008-08-26 22:07:42 +00001365 return false;
Chris Lattnerb6372932008-12-18 00:51:32 +00001366
1367 // If we had a phi translation failure, we'll have a single entry which is a
1368 // clobber in the current block. Reject this early.
Chris Lattner0c315472009-12-09 07:08:01 +00001369 if (Deps.size() == 1 && Deps[0].getResult().isClobber()) {
Torok Edwinba93ea72009-06-17 18:48:18 +00001370 DEBUG(
David Greene2e6efc42010-01-05 01:27:17 +00001371 dbgs() << "GVN: non-local load ";
1372 WriteAsOperand(dbgs(), LI);
1373 dbgs() << " is clobbered by " << *Deps[0].getResult().getInst() << '\n';
Torok Edwinba93ea72009-06-17 18:48:18 +00001374 );
Chris Lattnerb6372932008-12-18 00:51:32 +00001375 return false;
Torok Edwinba93ea72009-06-17 18:48:18 +00001376 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001377
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001378 // Filter out useless results (non-locals, etc). Keep track of the blocks
1379 // where the value is available (ValuesPerBlock), and of the blocks where it
1380 // is not (UnavailableBlocks), e.g. because the dependence is a call that
1381 // could potentially clobber the load.
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001382 SmallVector<AvailableValueInBlock, 16> ValuesPerBlock;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001383 SmallVector<BasicBlock*, 16> UnavailableBlocks;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001384
Chris Lattnerb6fc4b82008-12-09 19:25:07 +00001385 for (unsigned i = 0, e = Deps.size(); i != e; ++i) {
Chris Lattner0c315472009-12-09 07:08:01 +00001386 BasicBlock *DepBB = Deps[i].getBB();
1387 MemDepResult DepInfo = Deps[i].getResult();
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001388
Chris Lattner0e3d6332008-12-05 21:04:20 +00001389 if (DepInfo.isClobber()) {
Chris Lattnerca5f9cb2009-12-09 18:21:46 +00001390 // The address being loaded in this non-local block may not be the same as
1391 // the pointer operand of the load if PHI translation occurs. Make sure
1392 // to consider the right address.
1393 Value *Address = Deps[i].getAddress();
1394
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001395 // If the dependence is to a store that writes to a superset of the bits
1396 // read by the load, we can extract the bits we need for the load from the
1397 // stored value.
1398 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInfo.getInst())) {
Chris Lattnerca5f9cb2009-12-09 18:21:46 +00001399 if (TD && Address) {
1400 int Offset = AnalyzeLoadFromClobberingStore(LI->getType(), Address,
Chris Lattner07df9ef2009-12-09 07:37:07 +00001401 DepSI, *TD);
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001402 if (Offset != -1) {
1403 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
Dan Gohmand2099112010-11-10 19:03:33 +00001404 DepSI->getValueOperand(),
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001405 Offset));
1406 continue;
1407 }
1408 }
1409 }
Chris Lattner42376062009-12-06 01:57:02 +00001410
Chris Lattner42376062009-12-06 01:57:02 +00001411 // If the clobbering value is a memset/memcpy/memmove, see if we can
1412 // forward a value on from it.
Chris Lattner93236ba2009-12-06 04:54:31 +00001413 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(DepInfo.getInst())) {
Chris Lattnerca5f9cb2009-12-09 18:21:46 +00001414 if (TD && Address) {
1415 int Offset = AnalyzeLoadFromClobberingMemInst(LI->getType(), Address,
Chris Lattner07df9ef2009-12-09 07:37:07 +00001416 DepMI, *TD);
Chris Lattner93236ba2009-12-06 04:54:31 +00001417 if (Offset != -1) {
1418 ValuesPerBlock.push_back(AvailableValueInBlock::getMI(DepBB, DepMI,
1419 Offset));
1420 continue;
1421 }
Chris Lattner42376062009-12-06 01:57:02 +00001422 }
1423 }
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001424
Chris Lattner0e3d6332008-12-05 21:04:20 +00001425 UnavailableBlocks.push_back(DepBB);
1426 continue;
1427 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001428
Chris Lattner0e3d6332008-12-05 21:04:20 +00001429 Instruction *DepInst = DepInfo.getInst();
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001430
Chris Lattner0e3d6332008-12-05 21:04:20 +00001431 // Loading the allocation -> undef.
Chris Lattnerc4680252009-12-02 06:44:58 +00001432 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst) ||
Owen Andersonb9878ee2009-12-02 07:35:19 +00001433 // Loading immediately after lifetime begin -> undef.
1434 isLifetimeStart(DepInst)) {
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001435 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1436 UndefValue::get(LI->getType())));
Chris Lattner7e61daf2008-12-01 01:15:42 +00001437 continue;
1438 }
Owen Anderson2b2bd282009-10-28 07:05:35 +00001439
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001440 if (StoreInst *S = dyn_cast<StoreInst>(DepInst)) {
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001441 // Reject loads and stores that are to the same address but are of
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001442 // different types, unless the stored value can be coerced to the load type.
Dan Gohmand2099112010-11-10 19:03:33 +00001443 if (S->getValueOperand()->getType() != LI->getType()) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001444 // If the stored value is larger or equal to the loaded value, we can
1445 // reuse it.
Dan Gohmand2099112010-11-10 19:03:33 +00001446 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(S->getValueOperand(),
Chris Lattner9045f232009-09-21 17:24:04 +00001447 LI->getType(), *TD)) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001448 UnavailableBlocks.push_back(DepBB);
1449 continue;
1450 }
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001451 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001452
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001453 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
Dan Gohmand2099112010-11-10 19:03:33 +00001454 S->getValueOperand()));
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001455 continue;
1456 }
1457
1458 if (LoadInst *LD = dyn_cast<LoadInst>(DepInst)) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001459 // If the types mismatch and we can't handle it, reject reuse of the load.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001460 if (LD->getType() != LI->getType()) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001461 // If the stored value is larger or equal to the loaded value, we can
1462 // reuse it.
Chris Lattner9045f232009-09-21 17:24:04 +00001463 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(LD, LI->getType(),*TD)){
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001464 UnavailableBlocks.push_back(DepBB);
1465 continue;
1466 }
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001467 }
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001468 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB, LD));
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001469 continue;
Owen Anderson5e5599b2007-07-25 19:57:03 +00001470 }
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001471
1472 UnavailableBlocks.push_back(DepBB);
1473 continue;
Chris Lattner2876a642008-03-21 21:14:38 +00001474 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001475
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001476 // If we have no predecessors that produce a known value for this load, exit
1477 // early.
1478 if (ValuesPerBlock.empty()) return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001479
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001480 // If all of the instructions we depend on produce a known value for this
1481 // load, then it is fully redundant and we can use PHI insertion to compute
1482 // its value. Insert PHIs and remove the fully redundant value now.
1483 if (UnavailableBlocks.empty()) {
David Greene2e6efc42010-01-05 01:27:17 +00001484 DEBUG(dbgs() << "GVN REMOVING NONLOCAL LOAD: " << *LI << '\n');
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001485
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001486 // Perform PHI construction.
Chris Lattnerbf200182009-12-21 23:15:48 +00001487 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD, *DT,
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001488 VN.getAliasAnalysis());
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001489 LI->replaceAllUsesWith(V);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001490
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001491 if (isa<PHINode>(V))
1492 V->takeName(LI);
Duncan Sands19d0b472010-02-16 11:11:14 +00001493 if (V->getType()->isPointerTy())
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001494 MD->invalidateCachedPointerInfo(V);
Bob Wilson1da90412010-02-22 21:39:41 +00001495 VN.erase(LI);
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001496 toErase.push_back(LI);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001497 ++NumGVNLoad;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001498 return true;
1499 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001500
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001501 if (!EnablePRE || !EnableLoadPRE)
1502 return false;
1503
1504 // Okay, we have *some* definitions of the value. This means that the value
1505 // is available in some of our (transitive) predecessors. Let's think about
1506 // doing PRE of this load. This will involve inserting a new load into the
1507 // predecessor when it's not available. We could do this in general, but
1508 // prefer to not increase code size. As such, we only do this when we know
1509 // that we only have to insert *one* load (which means we're basically moving
1510 // the load, not inserting a new one).
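  //
  // As an illustration (hypothetical IR): for "%v = load i32* %P" whose block
  // has two predecessors, where a store to %P makes the value available in
  // %bb1 but nothing is known in %bb2, we insert "%v.pre = load i32* %P" at
  // the end of %bb2 and then build
  //   %v2 = phi i32 [ %storedval, %bb1 ], [ %v.pre, %bb2 ]
  // here, making the original load fully redundant.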
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001511
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001512 SmallPtrSet<BasicBlock *, 4> Blockers;
1513 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1514 Blockers.insert(UnavailableBlocks[i]);
1515
1516 // Let's find the first basic block with more than one predecessor. Walk backwards
1517 // through predecessors if needed.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001518 BasicBlock *LoadBB = LI->getParent();
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001519 BasicBlock *TmpBB = LoadBB;
1520
1521 bool isSinglePred = false;
Dale Johannesen81b64632009-06-17 20:48:23 +00001522 bool allSingleSucc = true;
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001523 while (TmpBB->getSinglePredecessor()) {
1524 isSinglePred = true;
1525 TmpBB = TmpBB->getSinglePredecessor();
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001526 if (TmpBB == LoadBB) // Infinite (unreachable) loop.
1527 return false;
1528 if (Blockers.count(TmpBB))
1529 return false;
Owen Andersonb590a922010-09-25 05:26:18 +00001530
1531 // If any of these blocks has more than one successor (i.e. if the edge we
1532 // just traversed was critical), then there are other paths through this
1533 // block along which the load may not be anticipated. Hoisting the load
1534 // above this block would be adding the load to execution paths along
1535 // which it was not previously executed.
Dale Johannesen81b64632009-06-17 20:48:23 +00001536 if (TmpBB->getTerminator()->getNumSuccessors() != 1)
Owen Andersonb590a922010-09-25 05:26:18 +00001537 return false;
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001538 }
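  // For example (illustrative): if the single predecessor %A of the load's
  // block also branches to an unrelated block %C, hoisting the load from this
  // block into %A would execute it on the %A -> %C path where it never ran
  // before, so the check above gives up.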
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001539
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001540 assert(TmpBB);
1541 LoadBB = TmpBB;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001542
Chris Lattner93236ba2009-12-06 04:54:31 +00001543 // FIXME: It is extremely unclear what this loop is doing, other than
1544 // artificially restricting loadpre.
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001545 if (isSinglePred) {
1546 bool isHot = false;
Chris Lattner93236ba2009-12-06 04:54:31 +00001547 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
1548 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1549 if (AV.isSimpleValue())
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001550 // The load is "hot" if it dominates the instruction producing its value,
1551 // which can only happen when both are inside a loop.
Chris Lattner93236ba2009-12-06 04:54:31 +00001552 if (Instruction *I = dyn_cast<Instruction>(AV.getSimpleValue()))
1553 if (DT->dominates(LI, I)) {
1554 isHot = true;
1555 break;
1556 }
1557 }
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001558
1559 // We are interested only in "hot" instructions. We don't want to do any
1560 // mis-optimizations here.
1561 if (!isHot)
1562 return false;
1563 }
1564
Bob Wilsond517b522010-02-01 21:17:14 +00001565 // Check to see how many predecessors have the loaded value fully
1566 // available.
1567 DenseMap<BasicBlock*, Value*> PredLoads;
Chris Lattnerd2a653a2008-12-05 07:49:08 +00001568 DenseMap<BasicBlock*, char> FullyAvailableBlocks;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001569 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i)
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001570 FullyAvailableBlocks[ValuesPerBlock[i].BB] = true;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001571 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1572 FullyAvailableBlocks[UnavailableBlocks[i]] = false;
1573
Bob Wilsona2fda8b2010-05-04 20:03:21 +00001574 SmallVector<std::pair<TerminatorInst*, unsigned>, 4> NeedToSplit;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001575 for (pred_iterator PI = pred_begin(LoadBB), E = pred_end(LoadBB);
1576 PI != E; ++PI) {
Bob Wilsond517b522010-02-01 21:17:14 +00001577 BasicBlock *Pred = *PI;
1578 if (IsValueFullyAvailableInBlock(Pred, FullyAvailableBlocks)) {
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001579 continue;
Bob Wilsond517b522010-02-01 21:17:14 +00001580 }
1581 PredLoads[Pred] = 0;
Bob Wilson92cdb6e2010-02-16 19:51:59 +00001582
Bob Wilsond517b522010-02-01 21:17:14 +00001583 if (Pred->getTerminator()->getNumSuccessors() != 1) {
Bob Wilson92cdb6e2010-02-16 19:51:59 +00001584 if (isa<IndirectBrInst>(Pred->getTerminator())) {
1585 DEBUG(dbgs() << "COULD NOT PRE LOAD BECAUSE OF INDBR CRITICAL EDGE '"
1586 << Pred->getName() << "': " << *LI << '\n');
1587 return false;
1588 }
Bob Wilsonaff96b22010-02-16 21:06:42 +00001589 unsigned SuccNum = GetSuccessorNumber(Pred, LoadBB);
Bob Wilsona2fda8b2010-05-04 20:03:21 +00001590 NeedToSplit.push_back(std::make_pair(Pred->getTerminator(), SuccNum));
Bob Wilsond517b522010-02-01 21:17:14 +00001591 }
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001592 }
Bob Wilsona2fda8b2010-05-04 20:03:21 +00001593 if (!NeedToSplit.empty()) {
Bob Wilson0c8b29b2010-05-05 20:44:15 +00001594 toSplit.append(NeedToSplit.begin(), NeedToSplit.end());
Bob Wilson892432b2010-03-01 23:37:32 +00001595 return false;
Bob Wilsona2fda8b2010-05-04 20:03:21 +00001596 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001597
Bob Wilsond517b522010-02-01 21:17:14 +00001598 // Decide whether PRE is profitable for this load.
1599 unsigned NumUnavailablePreds = PredLoads.size();
1600 assert(NumUnavailablePreds != 0 &&
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001601 "Fully available value should be eliminated above!");
Owen Anderson13a642d2010-10-01 20:02:55 +00001602
1603 // If this load is unavailable in multiple predecessors, reject it.
1604 // FIXME: If we could restructure the CFG, we could make a common pred with
1605 // all the preds that don't have an available LI and insert a new load into
1606 // that one block.
1607 if (NumUnavailablePreds != 1)
Bob Wilsond517b522010-02-01 21:17:14 +00001608 return false;
Bob Wilsond517b522010-02-01 21:17:14 +00001609
1610 // Check if the load can safely be moved to all the unavailable predecessors.
1611 bool CanDoPRE = true;
Chris Lattner44da5bd2009-11-28 15:39:14 +00001612 SmallVector<Instruction*, 8> NewInsts;
Bob Wilsond517b522010-02-01 21:17:14 +00001613 for (DenseMap<BasicBlock*, Value*>::iterator I = PredLoads.begin(),
1614 E = PredLoads.end(); I != E; ++I) {
1615 BasicBlock *UnavailablePred = I->first;
1616
1617 // Do PHI translation to get its value in the predecessor if necessary. The
1618 // returned pointer (if non-null) is guaranteed to dominate UnavailablePred.
1619
1620 // If all preds have a single successor, then we know it is safe to insert
1621 // the load on the pred (?!?), so we can insert code to materialize the
1622 // pointer if it is not available.
Dan Gohmand2099112010-11-10 19:03:33 +00001623 PHITransAddr Address(LI->getPointerOperand(), TD);
Bob Wilsond517b522010-02-01 21:17:14 +00001624 Value *LoadPtr = 0;
1625 if (allSingleSucc) {
1626 LoadPtr = Address.PHITranslateWithInsertion(LoadBB, UnavailablePred,
1627 *DT, NewInsts);
1628 } else {
Daniel Dunbar693ea892010-02-24 08:48:04 +00001629 Address.PHITranslateValue(LoadBB, UnavailablePred, DT);
Bob Wilsond517b522010-02-01 21:17:14 +00001630 LoadPtr = Address.getAddr();
Bob Wilsond517b522010-02-01 21:17:14 +00001631 }
1632
1633 // If we couldn't find or insert a computation of this phi translated value,
1634 // we fail PRE.
1635 if (LoadPtr == 0) {
1636 DEBUG(dbgs() << "COULDN'T INSERT PHI TRANSLATED VALUE OF: "
Dan Gohmand2099112010-11-10 19:03:33 +00001637 << *LI->getPointerOperand() << "\n");
Bob Wilsond517b522010-02-01 21:17:14 +00001638 CanDoPRE = false;
1639 break;
1640 }
1641
1642 // Make sure it is valid to move this load here. We have to watch out for:
1643 // @1 = getelementptr (i8* p, ...
1644 // test p and branch if == 0
1645 // load @1
1646 // It is valid to have the getelementptr before the test, even if p can be 0,
1647 // as getelementptr only does address arithmetic.
1648 // If we are not pushing the value through any multiple-successor blocks
1649 // we do not have this case. Otherwise, check that the load is safe to
1650 // put anywhere; this can be improved, but should be conservatively safe.
1651 if (!allSingleSucc &&
1652 // FIXME: REEVALUATE THIS.
1653 !isSafeToLoadUnconditionally(LoadPtr,
1654 UnavailablePred->getTerminator(),
1655 LI->getAlignment(), TD)) {
1656 CanDoPRE = false;
1657 break;
1658 }
1659
1660 I->second = LoadPtr;
Chris Lattner972e6d82009-12-09 01:59:31 +00001661 }
1662
Bob Wilsond517b522010-02-01 21:17:14 +00001663 if (!CanDoPRE) {
1664 while (!NewInsts.empty())
1665 NewInsts.pop_back_val()->eraseFromParent();
Dale Johannesen81b64632009-06-17 20:48:23 +00001666 return false;
Chris Lattner32140312009-11-28 16:08:18 +00001667 }
Dale Johannesen81b64632009-06-17 20:48:23 +00001668
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001669 // Okay, we can eliminate this load by inserting a reload in the predecessor
1670 // and using PHI construction to get the value in the other predecessors, do
1671 // it.
David Greene2e6efc42010-01-05 01:27:17 +00001672 DEBUG(dbgs() << "GVN REMOVING PRE LOAD: " << *LI << '\n');
Chris Lattner32140312009-11-28 16:08:18 +00001673 DEBUG(if (!NewInsts.empty())
David Greene2e6efc42010-01-05 01:27:17 +00001674 dbgs() << "INSERTED " << NewInsts.size() << " INSTS: "
Chris Lattner32140312009-11-28 16:08:18 +00001675 << *NewInsts.back() << '\n');
1676
Bob Wilsond517b522010-02-01 21:17:14 +00001677 // Assign value numbers to the new instructions.
1678 for (unsigned i = 0, e = NewInsts.size(); i != e; ++i) {
1679 // FIXME: We really _ought_ to insert these value numbers into their
1680 // parent's availability map. However, in doing so, we risk getting into
1681 // ordering issues. If a block hasn't been processed yet, we would be
1682 // marking a value as AVAIL-IN, which isn't what we intend.
1683 VN.lookup_or_add(NewInsts[i]);
1684 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001685
Bob Wilsond517b522010-02-01 21:17:14 +00001686 for (DenseMap<BasicBlock*, Value*>::iterator I = PredLoads.begin(),
1687 E = PredLoads.end(); I != E; ++I) {
1688 BasicBlock *UnavailablePred = I->first;
1689 Value *LoadPtr = I->second;
1690
1691 Value *NewLoad = new LoadInst(LoadPtr, LI->getName()+".pre", false,
1692 LI->getAlignment(),
1693 UnavailablePred->getTerminator());
1694
1695 // Add the newly created load.
1696 ValuesPerBlock.push_back(AvailableValueInBlock::get(UnavailablePred,
1697 NewLoad));
Bob Wilson923261b2010-02-23 05:55:00 +00001698 MD->invalidateCachedPointerInfo(LoadPtr);
1699 DEBUG(dbgs() << "GVN INSERTED " << *NewLoad << '\n');
Bob Wilsond517b522010-02-01 21:17:14 +00001700 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001701
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001702 // Perform PHI construction.
Chris Lattnerbf200182009-12-21 23:15:48 +00001703 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD, *DT,
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001704 VN.getAliasAnalysis());
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001705 LI->replaceAllUsesWith(V);
1706 if (isa<PHINode>(V))
1707 V->takeName(LI);
Duncan Sands19d0b472010-02-16 11:11:14 +00001708 if (V->getType()->isPointerTy())
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001709 MD->invalidateCachedPointerInfo(V);
Bob Wilson1da90412010-02-22 21:39:41 +00001710 VN.erase(LI);
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001711 toErase.push_back(LI);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001712 ++NumPRELoad;
Owen Anderson5e5599b2007-07-25 19:57:03 +00001713 return true;
1714}
1715
Owen Anderson221a4362007-08-16 22:02:55 +00001716/// processLoad - Attempt to eliminate a load, first by eliminating it
1717/// locally, and then attempting non-local elimination if that fails.
Chris Lattner0e3d6332008-12-05 21:04:20 +00001718bool GVN::processLoad(LoadInst *L, SmallVectorImpl<Instruction*> &toErase) {
Dan Gohman81132462009-11-14 02:27:51 +00001719 if (!MD)
1720 return false;
1721
Chris Lattner0e3d6332008-12-05 21:04:20 +00001722 if (L->isVolatile())
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001723 return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001724
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001725 // Find the local dependency of this load.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001726 MemDepResult Dep = MD->getDependency(L);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001727
Chris Lattner0e3d6332008-12-05 21:04:20 +00001728 // If the value isn't available, don't do anything!
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001729 if (Dep.isClobber()) {
Chris Lattner0a9616d2009-09-21 05:57:11 +00001730 // Check to see if we have something like this:
Chris Lattner1dd48c32009-09-20 19:03:47 +00001731 // store i32 123, i32* %P
1732 // %A = bitcast i32* %P to i8*
1733 // %B = gep i8* %A, i32 1
1734 // %C = load i8* %B
1735 //
1736 // We handle this by recognizing that the clobbering instruction and the load
1737 // share an obvious common base plus a constant offset, and that the previous
1738 // store (or memset) completely covers this load. This sort of thing can
1739 // happen in bitfield access code.
Chris Lattner42376062009-12-06 01:57:02 +00001740 Value *AvailVal = 0;
Chris Lattner0a9616d2009-09-21 05:57:11 +00001741 if (StoreInst *DepSI = dyn_cast<StoreInst>(Dep.getInst()))
Duncan Sands246b71c2010-11-12 21:10:24 +00001742 if (TD) {
Chris Lattner07df9ef2009-12-09 07:37:07 +00001743 int Offset = AnalyzeLoadFromClobberingStore(L->getType(),
1744 L->getPointerOperand(),
1745 DepSI, *TD);
Chris Lattner42376062009-12-06 01:57:02 +00001746 if (Offset != -1)
Dan Gohmand2099112010-11-10 19:03:33 +00001747 AvailVal = GetStoreValueForLoad(DepSI->getValueOperand(), Offset,
Chris Lattner42376062009-12-06 01:57:02 +00001748 L->getType(), L, *TD);
Chris Lattner9d7fb292009-09-21 06:22:46 +00001749 }
Chris Lattner0a9616d2009-09-21 05:57:11 +00001750
Chris Lattner42376062009-12-06 01:57:02 +00001751 // If the clobbering value is a memset/memcpy/memmove, see if we can forward
1752 // a value on from it.
1753 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(Dep.getInst())) {
Duncan Sands246b71c2010-11-12 21:10:24 +00001754 if (TD) {
Chris Lattner07df9ef2009-12-09 07:37:07 +00001755 int Offset = AnalyzeLoadFromClobberingMemInst(L->getType(),
1756 L->getPointerOperand(),
1757 DepMI, *TD);
Chris Lattner42376062009-12-06 01:57:02 +00001758 if (Offset != -1)
1759 AvailVal = GetMemInstValueForLoad(DepMI, Offset, L->getType(), L,*TD);
1760 }
1761 }
1762
1763 if (AvailVal) {
David Greene2e6efc42010-01-05 01:27:17 +00001764 DEBUG(dbgs() << "GVN COERCED INST:\n" << *Dep.getInst() << '\n'
Chris Lattner42376062009-12-06 01:57:02 +00001765 << *AvailVal << '\n' << *L << "\n\n\n");
1766
1767 // Replace the load!
1768 L->replaceAllUsesWith(AvailVal);
Duncan Sands19d0b472010-02-16 11:11:14 +00001769 if (AvailVal->getType()->isPointerTy())
Chris Lattner42376062009-12-06 01:57:02 +00001770 MD->invalidateCachedPointerInfo(AvailVal);
Bob Wilson1da90412010-02-22 21:39:41 +00001771 VN.erase(L);
Chris Lattner42376062009-12-06 01:57:02 +00001772 toErase.push_back(L);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001773 ++NumGVNLoad;
Chris Lattner42376062009-12-06 01:57:02 +00001774 return true;
1775 }
1776
Torok Edwin72070282009-05-29 09:46:03 +00001777 DEBUG(
1778 // Fast-print the dependence; using operator<< on the instruction would be too slow.
David Greene2e6efc42010-01-05 01:27:17 +00001779 dbgs() << "GVN: load ";
1780 WriteAsOperand(dbgs(), L);
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001781 Instruction *I = Dep.getInst();
David Greene2e6efc42010-01-05 01:27:17 +00001782 dbgs() << " is clobbered by " << *I << '\n';
Torok Edwin72070282009-05-29 09:46:03 +00001783 );
Chris Lattner0e3d6332008-12-05 21:04:20 +00001784 return false;
Torok Edwin72070282009-05-29 09:46:03 +00001785 }
Chris Lattner0e3d6332008-12-05 21:04:20 +00001786
1787 // If it is defined in another block, try harder.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001788 if (Dep.isNonLocal())
Chris Lattner0e3d6332008-12-05 21:04:20 +00001789 return processNonLocalLoad(L, toErase);
Eli Friedman716c10c2008-02-12 12:08:14 +00001790
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001791 Instruction *DepInst = Dep.getInst();
Chris Lattner0e3d6332008-12-05 21:04:20 +00001792 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInst)) {
Dan Gohmand2099112010-11-10 19:03:33 +00001793 Value *StoredVal = DepSI->getValueOperand();
Chris Lattner1dd48c32009-09-20 19:03:47 +00001794
1795 // The store and load are to a must-aliased pointer, but they may not
1796 // actually have the same type. See if we know how to reuse the stored
1797 // value (depending on its type).
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001798 if (StoredVal->getType() != L->getType()) {
Duncan Sands246b71c2010-11-12 21:10:24 +00001799 if (TD) {
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001800 StoredVal = CoerceAvailableValueToLoadType(StoredVal, L->getType(),
1801 L, *TD);
1802 if (StoredVal == 0)
1803 return false;
1804
David Greene2e6efc42010-01-05 01:27:17 +00001805 DEBUG(dbgs() << "GVN COERCED STORE:\n" << *DepSI << '\n' << *StoredVal
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001806 << '\n' << *L << "\n\n\n");
1807 }
1808 else
Chris Lattner1dd48c32009-09-20 19:03:47 +00001809 return false;
Chris Lattner1dd48c32009-09-20 19:03:47 +00001810 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001811
Chris Lattner0e3d6332008-12-05 21:04:20 +00001812 // Remove it!
Chris Lattner1dd48c32009-09-20 19:03:47 +00001813 L->replaceAllUsesWith(StoredVal);
Duncan Sands19d0b472010-02-16 11:11:14 +00001814 if (StoredVal->getType()->isPointerTy())
Chris Lattner1dd48c32009-09-20 19:03:47 +00001815 MD->invalidateCachedPointerInfo(StoredVal);
Bob Wilson1da90412010-02-22 21:39:41 +00001816 VN.erase(L);
Chris Lattner0e3d6332008-12-05 21:04:20 +00001817 toErase.push_back(L);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001818 ++NumGVNLoad;
Chris Lattner0e3d6332008-12-05 21:04:20 +00001819 return true;
1820 }
1821
1822 if (LoadInst *DepLI = dyn_cast<LoadInst>(DepInst)) {
Chris Lattner1dd48c32009-09-20 19:03:47 +00001823 Value *AvailableVal = DepLI;
1824
1825 // The loads are of a must-aliased pointer, but they may not actually have
1826 // the same type. See if we know how to reuse the previously loaded value
1827 // (depending on its type).
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001828 if (DepLI->getType() != L->getType()) {
Duncan Sands246b71c2010-11-12 21:10:24 +00001829 if (TD) {
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001830 AvailableVal = CoerceAvailableValueToLoadType(DepLI, L->getType(), L,*TD);
1831 if (AvailableVal == 0)
1832 return false;
Chris Lattner1dd48c32009-09-20 19:03:47 +00001833
David Greene2e6efc42010-01-05 01:27:17 +00001834 DEBUG(dbgs() << "GVN COERCED LOAD:\n" << *DepLI << "\n" << *AvailableVal
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001835 << "\n" << *L << "\n\n\n");
1836 }
1837 else
1838 return false;
Chris Lattner1dd48c32009-09-20 19:03:47 +00001839 }
1840
Chris Lattner0e3d6332008-12-05 21:04:20 +00001841 // Remove it!
Chris Lattner1dd48c32009-09-20 19:03:47 +00001842 L->replaceAllUsesWith(AvailableVal);
Duncan Sands19d0b472010-02-16 11:11:14 +00001843 if (DepLI->getType()->isPointerTy())
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00001844 MD->invalidateCachedPointerInfo(DepLI);
Bob Wilson1da90412010-02-22 21:39:41 +00001845 VN.erase(L);
Chris Lattner0e3d6332008-12-05 21:04:20 +00001846 toErase.push_back(L);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001847 ++NumGVNLoad;
Chris Lattner0e3d6332008-12-05 21:04:20 +00001848 return true;
1849 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001850
Chris Lattner3ff6d012008-11-30 01:39:32 +00001851 // If this load really doesn't depend on anything, then we must be loading an
1852 // undef value. This can happen when loading from a fresh allocation with no
1853 // intervening stores, for example.
Victor Hernandez8acf2952009-10-23 21:09:37 +00001854 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst)) {
Owen Andersonb292b8c2009-07-30 23:03:37 +00001855 L->replaceAllUsesWith(UndefValue::get(L->getType()));
Bob Wilson1da90412010-02-22 21:39:41 +00001856 VN.erase(L);
Chris Lattner3ff6d012008-11-30 01:39:32 +00001857 toErase.push_back(L);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001858 ++NumGVNLoad;
Chris Lattner0e3d6332008-12-05 21:04:20 +00001859 return true;
Eli Friedman716c10c2008-02-12 12:08:14 +00001860 }
Owen Anderson2b2bd282009-10-28 07:05:35 +00001861
Owen Andersonb9878ee2009-12-02 07:35:19 +00001862 // If this load occurs right after a lifetime begin,
Owen Anderson2b2bd282009-10-28 07:05:35 +00001863 // then the loaded value is undefined.
1864 if (IntrinsicInst* II = dyn_cast<IntrinsicInst>(DepInst)) {
Owen Andersonb9878ee2009-12-02 07:35:19 +00001865 if (II->getIntrinsicID() == Intrinsic::lifetime_start) {
Owen Anderson2b2bd282009-10-28 07:05:35 +00001866 L->replaceAllUsesWith(UndefValue::get(L->getType()));
Bob Wilson1da90412010-02-22 21:39:41 +00001867 VN.erase(L);
Owen Anderson2b2bd282009-10-28 07:05:35 +00001868 toErase.push_back(L);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001869 ++NumGVNLoad;
Owen Anderson2b2bd282009-10-28 07:05:35 +00001870 return true;
1871 }
1872 }
Eli Friedman716c10c2008-02-12 12:08:14 +00001873
Chris Lattner0e3d6332008-12-05 21:04:20 +00001874 return false;
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001875}
1876
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001877Value *GVN::lookupNumber(BasicBlock *BB, uint32_t num) {
Owen Anderson54e02192008-06-23 17:49:45 +00001878 DenseMap<BasicBlock*, ValueNumberScope*>::iterator I = localAvail.find(BB);
1879 if (I == localAvail.end())
1880 return 0;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001881
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001882 ValueNumberScope *Locals = I->second;
1883 while (Locals) {
1884 DenseMap<uint32_t, Value*>::iterator I = Locals->table.find(num);
1885 if (I != Locals->table.end())
Owen Anderson1b3ea962008-06-20 01:15:47 +00001886 return I->second;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001887 Locals = Locals->parent;
Owen Anderson1b3ea962008-06-20 01:15:47 +00001888 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001889
Owen Anderson1b3ea962008-06-20 01:15:47 +00001890 return 0;
1891}
1892
Owen Andersonbfe133e2008-12-15 02:03:00 +00001893
Owen Anderson398602a2007-08-14 18:16:29 +00001894/// processInstruction - When calculating availability, handle an instruction
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001895/// by inserting it into the appropriate sets
Owen Andersonaccdca12008-06-12 19:25:32 +00001896bool GVN::processInstruction(Instruction *I,
Chris Lattner804209d2008-03-21 22:01:16 +00001897 SmallVectorImpl<Instruction*> &toErase) {
Devang Patel03936a12010-02-11 00:20:49 +00001898 // Ignore dbg info intrinsics.
1899 if (isa<DbgInfoIntrinsic>(I))
1900 return false;
1901
Duncan Sands246b71c2010-11-12 21:10:24 +00001902 // If the instruction can be easily simplified then do so now in preference
1903 // to value numbering it. Value numbering often exposes redundancies, for
1904 // example if it determines that %y is equal to %x then the instruction
1905 // "%z = and i32 %x, %y" becomes "%z = and i32 %x, %x" which we now simplify.
Duncan Sandsb99f39b2010-11-14 18:36:10 +00001906 if (Value *V = SimplifyInstruction(I, TD, DT)) {
Duncan Sands246b71c2010-11-12 21:10:24 +00001907 I->replaceAllUsesWith(V);
1908 if (MD && V->getType()->isPointerTy())
1909 MD->invalidateCachedPointerInfo(V);
1910 VN.erase(I);
1911 toErase.push_back(I);
1912 return true;
1913 }
1914
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001915 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
1916 bool Changed = processLoad(LI, toErase);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001917
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001918 if (!Changed) {
1919 unsigned Num = VN.lookup_or_add(LI);
1920 localAvail[I->getParent()]->table.insert(std::make_pair(Num, LI));
Owen Anderson6a903bc2008-06-18 21:41:49 +00001921 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001922
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001923 return Changed;
Owen Anderson6a903bc2008-06-18 21:41:49 +00001924 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001925
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001926 uint32_t NextNum = VN.getNextUnusedValueNumber();
1927 unsigned Num = VN.lookup_or_add(I);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001928
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001929 if (BranchInst *BI = dyn_cast<BranchInst>(I)) {
1930 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001931
Owen Anderson98f912b2009-04-01 23:53:49 +00001932 if (!BI->isConditional() || isa<Constant>(BI->getCondition()))
1933 return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001934
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001935 Value *BranchCond = BI->getCondition();
1936 uint32_t CondVN = VN.lookup_or_add(BranchCond);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001937
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001938 BasicBlock *TrueSucc = BI->getSuccessor(0);
1939 BasicBlock *FalseSucc = BI->getSuccessor(1);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001940
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001941 if (TrueSucc->getSinglePredecessor())
1942 localAvail[TrueSucc]->table[CondVN] =
1943 ConstantInt::getTrue(TrueSucc->getContext());
1944 if (FalseSucc->getSinglePredecessor())
1945 localAvail[FalseSucc]->table[CondVN] =
1946 ConstantInt::getFalse(TrueSucc->getContext());
Owen Anderson98f912b2009-04-01 23:53:49 +00001947
1948 return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001949
Owen Anderson0c1e6342008-04-07 09:59:07 +00001950 // Allocations are always uniquely numbered, so we can save time and memory
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001951 // by fast failing them.
Victor Hernandez8acf2952009-10-23 21:09:37 +00001952 } else if (isa<AllocaInst>(I) || isa<TerminatorInst>(I)) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001953 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Anderson0c1e6342008-04-07 09:59:07 +00001954 return false;
Owen Anderson6a903bc2008-06-18 21:41:49 +00001955 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001956
Owen Anderson221a4362007-08-16 22:02:55 +00001957 // Collapse PHI nodes
Owen Andersonbc271a02007-08-14 18:33:27 +00001958 if (PHINode* p = dyn_cast<PHINode>(I)) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001959 Value *constVal = CollapsePhi(p);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001960
Owen Andersonbc271a02007-08-14 18:33:27 +00001961 if (constVal) {
Owen Andersonf5023a72007-08-16 22:51:56 +00001962 p->replaceAllUsesWith(constVal);
Duncan Sands19d0b472010-02-16 11:11:14 +00001963 if (MD && constVal->getType()->isPointerTy())
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00001964 MD->invalidateCachedPointerInfo(constVal);
Owen Anderson164274e2008-12-23 00:49:51 +00001965 VN.erase(p);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001966
Owen Andersonf5023a72007-08-16 22:51:56 +00001967 toErase.push_back(p);
Owen Anderson6a903bc2008-06-18 21:41:49 +00001968 } else {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001969 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Andersonbc271a02007-08-14 18:33:27 +00001970 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001971
Owen Anderson3ea90a72008-07-03 17:44:33 +00001972 // If the number we were assigned was a brand new VN, then we don't
1973 // need to do a lookup to see if the number already exists
1974 // somewhere in the domtree: it can't!
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001975 } else if (Num == NextNum) {
1976 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001977
Owen Andersonbfe133e2008-12-15 02:03:00 +00001978 // Perform fast-path value-number based elimination of values inherited from
1979 // dominators.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001980 } else if (Value *repl = lookupNumber(I->getParent(), Num)) {
Owen Anderson086b2c42007-12-08 01:37:09 +00001981 // Remove it!
Owen Anderson10ffa862007-07-31 23:27:13 +00001982 VN.erase(I);
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001983 I->replaceAllUsesWith(repl);
Duncan Sands19d0b472010-02-16 11:11:14 +00001984 if (MD && repl->getType()->isPointerTy())
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00001985 MD->invalidateCachedPointerInfo(repl);
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001986 toErase.push_back(I);
1987 return true;
Owen Andersonbfe133e2008-12-15 02:03:00 +00001988
Owen Anderson3ea90a72008-07-03 17:44:33 +00001989 } else {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001990 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001991 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001992
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001993 return false;
1994}
1995
Bill Wendling456e8852008-12-22 22:32:22 +00001996/// runOnFunction - This is the main transformation entry point for a function.
Owen Anderson676070d2007-08-14 18:04:11 +00001997bool GVN::runOnFunction(Function& F) {
Dan Gohman81132462009-11-14 02:27:51 +00001998 if (!NoLoads)
1999 MD = &getAnalysis<MemoryDependenceAnalysis>();
Chris Lattner8541ede2008-12-01 00:40:32 +00002000 DT = &getAnalysis<DominatorTree>();
Duncan Sands246b71c2010-11-12 21:10:24 +00002001 TD = getAnalysisIfAvailable<TargetData>();
Owen Andersonf7928602008-05-12 20:15:55 +00002002 VN.setAliasAnalysis(&getAnalysis<AliasAnalysis>());
Chris Lattner8541ede2008-12-01 00:40:32 +00002003 VN.setMemDep(MD);
2004 VN.setDomTree(DT);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002005
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002006 bool Changed = false;
2007 bool ShouldContinue = true;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002008
Owen Andersonac310962008-07-16 17:52:31 +00002009 // Merge unconditional branches, allowing PRE to catch more
2010 // optimization opportunities.
2011 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002012 BasicBlock *BB = FI;
Owen Andersonac310962008-07-16 17:52:31 +00002013 ++FI;
Owen Andersonc0623812008-07-17 00:01:40 +00002014 bool removedBlock = MergeBlockIntoPredecessor(BB, this);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00002015 if (removedBlock) ++NumGVNBlocks;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002016
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002017 Changed |= removedBlock;
Owen Andersonac310962008-07-16 17:52:31 +00002018 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002019
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002020 unsigned Iteration = 0;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002021
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002022 while (ShouldContinue) {
David Greene2e6efc42010-01-05 01:27:17 +00002023 DEBUG(dbgs() << "GVN iteration: " << Iteration << "\n");
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002024 ShouldContinue = iterateOnFunction(F);
Bob Wilson92cdb6e2010-02-16 19:51:59 +00002025 if (splitCriticalEdges())
2026 ShouldContinue = true;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002027 Changed |= ShouldContinue;
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002028 ++Iteration;
Owen Anderson676070d2007-08-14 18:04:11 +00002029 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002030
Owen Anderson04a6e0b2008-07-18 18:03:38 +00002031 if (EnablePRE) {
Owen Anderson2fbfb702008-09-03 23:06:07 +00002032 bool PREChanged = true;
2033 while (PREChanged) {
2034 PREChanged = performPRE(F);
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002035 Changed |= PREChanged;
Owen Anderson2fbfb702008-09-03 23:06:07 +00002036 }
Owen Anderson04a6e0b2008-07-18 18:03:38 +00002037 }
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002038 // FIXME: Should perform GVN again after PRE does something. PRE can move
2039 // computations into blocks where they become fully redundant. Note that
2040 // we can't do this until PRE's critical edge splitting updates memdep.
2041 // Actually, when this happens, we should just fully integrate PRE into GVN.
Nuno Lopese3127f32008-10-10 16:25:50 +00002042
2043 cleanupGlobalSets();
2044
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002045 return Changed;
Owen Anderson676070d2007-08-14 18:04:11 +00002046}
2047
2048
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002049bool GVN::processBlock(BasicBlock *BB) {
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002050 // FIXME: Kill off toErase by erasing eagerly in a helper function (and
2051 // incrementing BI before processing an instruction).
Owen Andersonaccdca12008-06-12 19:25:32 +00002052 SmallVector<Instruction*, 8> toErase;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002053 bool ChangedFunction = false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002054
Owen Andersonaccdca12008-06-12 19:25:32 +00002055 for (BasicBlock::iterator BI = BB->begin(), BE = BB->end();
2056 BI != BE;) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002057 ChangedFunction |= processInstruction(BI, toErase);
Owen Andersonaccdca12008-06-12 19:25:32 +00002058 if (toErase.empty()) {
2059 ++BI;
2060 continue;
2061 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002062
Owen Andersonaccdca12008-06-12 19:25:32 +00002063 // If we need some instructions deleted, do it now.
2064 NumGVNInstr += toErase.size();
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002065
Owen Andersonaccdca12008-06-12 19:25:32 +00002066 // Avoid iterator invalidation.
2067 bool AtStart = BI == BB->begin();
2068 if (!AtStart)
2069 --BI;
2070
2071 for (SmallVector<Instruction*, 4>::iterator I = toErase.begin(),
Chris Lattner8541ede2008-12-01 00:40:32 +00002072 E = toErase.end(); I != E; ++I) {
David Greene2e6efc42010-01-05 01:27:17 +00002073 DEBUG(dbgs() << "GVN removed: " << **I << '\n');
Dan Gohman81132462009-11-14 02:27:51 +00002074 if (MD) MD->removeInstruction(*I);
Owen Andersonaccdca12008-06-12 19:25:32 +00002075 (*I)->eraseFromParent();
Bill Wendlingebb6a542008-12-22 21:57:30 +00002076 DEBUG(verifyRemoved(*I));
Chris Lattner8541ede2008-12-01 00:40:32 +00002077 }
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002078 toErase.clear();
Owen Andersonaccdca12008-06-12 19:25:32 +00002079
2080 if (AtStart)
2081 BI = BB->begin();
2082 else
2083 ++BI;
Owen Andersonaccdca12008-06-12 19:25:32 +00002084 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002085
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002086 return ChangedFunction;
Owen Andersonaccdca12008-06-12 19:25:32 +00002087}
2088
Owen Anderson6a903bc2008-06-18 21:41:49 +00002089/// performPRE - Perform a purely local form of PRE that looks for diamond
2090/// control flow patterns and attempts to perform simple PRE at the join point.
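/// For example (illustrative only), given a diamond where the value is
/// computed in the join block and in one predecessor:
///   BB1:  %v = add i32 %a, %b
///         br label %BB3
///   BB2:  br label %BB3
///   BB3:  %w = add i32 %a, %b
/// the add is re-instantiated in BB2 and %w is replaced by a phi of the value
/// from BB1 and the new copy in BB2.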
Chris Lattnera546dcf2009-10-31 22:11:15 +00002091bool GVN::performPRE(Function &F) {
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002092 bool Changed = false;
Chris Lattnerf00aae42008-12-01 07:29:03 +00002093 DenseMap<BasicBlock*, Value*> predMap;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002094 for (df_iterator<BasicBlock*> DI = df_begin(&F.getEntryBlock()),
2095 DE = df_end(&F.getEntryBlock()); DI != DE; ++DI) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002096 BasicBlock *CurrentBlock = *DI;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002097
Owen Anderson6a903bc2008-06-18 21:41:49 +00002098 // Nothing to PRE in the entry block.
2099 if (CurrentBlock == &F.getEntryBlock()) continue;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002100
Owen Anderson6a903bc2008-06-18 21:41:49 +00002101 for (BasicBlock::iterator BI = CurrentBlock->begin(),
2102 BE = CurrentBlock->end(); BI != BE; ) {
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002103 Instruction *CurInst = BI++;
Duncan Sands1efabaa2009-05-06 06:49:50 +00002104
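      // Skip instructions that can never be PRE'd here: allocas, terminators,
      // PHIs, instructions that produce no value, instructions that read or
      // write memory or have other side effects, and debug intrinsics.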
Victor Hernandez8acf2952009-10-23 21:09:37 +00002105 if (isa<AllocaInst>(CurInst) ||
Victor Hernandez5d034492009-09-18 22:35:49 +00002106 isa<TerminatorInst>(CurInst) || isa<PHINode>(CurInst) ||
Devang Patel92f86192009-10-14 17:29:00 +00002107 CurInst->getType()->isVoidTy() ||
Duncan Sands1efabaa2009-05-06 06:49:50 +00002108 CurInst->mayReadFromMemory() || CurInst->mayHaveSideEffects() ||
John Criswell073e4d12009-03-10 15:04:53 +00002109 isa<DbgInfoIntrinsic>(CurInst))
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002110 continue;
Owen Anderson03986072010-08-07 00:20:35 +00002111
2112 // We don't currently value number ANY inline asm calls.
2113 if (CallInst *CallI = dyn_cast<CallInst>(CurInst))
2114 if (CallI->isInlineAsm())
2115 continue;
Duncan Sands1efabaa2009-05-06 06:49:50 +00002116
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002117 uint32_t ValNo = VN.lookup(CurInst);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002118
Owen Anderson6a903bc2008-06-18 21:41:49 +00002119      // Examine the predecessors for PRE opportunities. We're
2120 // only trying to solve the basic diamond case, where
2121 // a value is computed in the successor and one predecessor,
2122 // but not the other. We also explicitly disallow cases
2123 // where the successor is its own predecessor, because they're
2124 // more complicated to get right.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002125 unsigned NumWith = 0;
2126 unsigned NumWithout = 0;
2127 BasicBlock *PREPred = 0;
Chris Lattnerf00aae42008-12-01 07:29:03 +00002128 predMap.clear();
2129
Owen Anderson6a903bc2008-06-18 21:41:49 +00002130 for (pred_iterator PI = pred_begin(CurrentBlock),
2131 PE = pred_end(CurrentBlock); PI != PE; ++PI) {
Gabor Greifb0d56ff2010-07-09 14:36:49 +00002132 BasicBlock *P = *PI;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002133 // We're not interested in PRE where the block is its
Bob Wilson76e8c592010-02-03 00:33:21 +00002134 // own predecessor, or in blocks with predecessors
Owen Anderson1b3ea962008-06-20 01:15:47 +00002135 // that are not reachable.
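        // In the hopeless cases NumWithout is forced to 2 so that the
        // "NumWithout != 1" test after this loop rejects the instruction.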
Gabor Greifb0d56ff2010-07-09 14:36:49 +00002136 if (P == CurrentBlock) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002137 NumWithout = 2;
Owen Anderson1b3ea962008-06-20 01:15:47 +00002138 break;
Gabor Greifb0d56ff2010-07-09 14:36:49 +00002139 } else if (!localAvail.count(P)) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002140 NumWithout = 2;
Owen Anderson1b3ea962008-06-20 01:15:47 +00002141 break;
2142 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002143
2144 DenseMap<uint32_t, Value*>::iterator predV =
Gabor Greifb0d56ff2010-07-09 14:36:49 +00002145 localAvail[P]->table.find(ValNo);
2146 if (predV == localAvail[P]->table.end()) {
2147 PREPred = P;
Dan Gohmand2d1ae12010-06-22 15:08:57 +00002148 ++NumWithout;
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002149 } else if (predV->second == CurInst) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002150 NumWithout = 2;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002151 } else {
Gabor Greifb0d56ff2010-07-09 14:36:49 +00002152 predMap[P] = predV->second;
Dan Gohmand2d1ae12010-06-22 15:08:57 +00002153 ++NumWith;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002154 }
2155 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002156
Owen Anderson6a903bc2008-06-18 21:41:49 +00002157 // Don't do PRE when it might increase code size, i.e. when
2158 // we would need to insert instructions in more than one pred.
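      // The only shape we handle is the diamond: the value is already
      // available in at least one predecessor and missing in exactly one.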
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002159 if (NumWithout != 1 || NumWith == 0)
Owen Anderson6a903bc2008-06-18 21:41:49 +00002160 continue;
Chris Lattnera546dcf2009-10-31 22:11:15 +00002161
2162      // Don't do PRE across an indirect branch.
2163 if (isa<IndirectBrInst>(PREPred->getTerminator()))
2164 continue;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002165
Owen Andersonfdf9f162008-06-19 19:54:19 +00002166 // We can't do PRE safely on a critical edge, so instead we schedule
2167 // the edge to be split and perform the PRE the next time we iterate
2168 // on the function.
Bob Wilsonaff96b22010-02-16 21:06:42 +00002169 unsigned SuccNum = GetSuccessorNumber(PREPred, CurrentBlock);
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002170 if (isCriticalEdge(PREPred->getTerminator(), SuccNum)) {
2171 toSplit.push_back(std::make_pair(PREPred->getTerminator(), SuccNum));
Owen Andersonfdf9f162008-06-19 19:54:19 +00002172 continue;
2173 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002174
Bob Wilson76e8c592010-02-03 00:33:21 +00002175 // Instantiate the expression in the predecessor that lacked it.
Owen Anderson6a903bc2008-06-18 21:41:49 +00002176 // Because we are going top-down through the block, all value numbers
2177 // will be available in the predecessor by the time we need them. Any
Bob Wilson76e8c592010-02-03 00:33:21 +00002178 // that weren't originally present will have been instantiated earlier
Owen Anderson6a903bc2008-06-18 21:41:49 +00002179 // in this loop.
Nick Lewycky42fb7452009-09-27 07:38:41 +00002180 Instruction *PREInstr = CurInst->clone();
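      // The clone's operands still refer to the values CurInst uses in this
      // block; rewrite each one to the equivalent value available in PREPred,
      // or give up if no such value is known.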
Owen Anderson6a903bc2008-06-18 21:41:49 +00002181 bool success = true;
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002182 for (unsigned i = 0, e = CurInst->getNumOperands(); i != e; ++i) {
2183 Value *Op = PREInstr->getOperand(i);
2184 if (isa<Argument>(Op) || isa<Constant>(Op) || isa<GlobalValue>(Op))
2185 continue;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002186
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002187 if (Value *V = lookupNumber(PREPred, VN.lookup(Op))) {
2188 PREInstr->setOperand(i, V);
2189 } else {
2190 success = false;
2191 break;
Owen Anderson8e462e92008-07-11 20:05:13 +00002192 }
Owen Anderson6a903bc2008-06-18 21:41:49 +00002193 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002194
Owen Anderson6a903bc2008-06-18 21:41:49 +00002195 // Fail out if we encounter an operand that is not available in
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002196 // the PRE predecessor. This is typically because of loads which
Owen Anderson6a903bc2008-06-18 21:41:49 +00002197 // are not value numbered precisely.
2198 if (!success) {
2199 delete PREInstr;
Bill Wendling3c793442008-12-22 22:14:07 +00002200 DEBUG(verifyRemoved(PREInstr));
Owen Anderson6a903bc2008-06-18 21:41:49 +00002201 continue;
2202 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002203
Owen Anderson6a903bc2008-06-18 21:41:49 +00002204 PREInstr->insertBefore(PREPred->getTerminator());
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002205 PREInstr->setName(CurInst->getName() + ".pre");
Owen Anderson1b3ea962008-06-20 01:15:47 +00002206 predMap[PREPred] = PREInstr;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002207 VN.add(PREInstr, ValNo);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00002208 ++NumGVNPRE;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002209
Owen Anderson6a903bc2008-06-18 21:41:49 +00002210 // Update the availability map to include the new instruction.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002211 localAvail[PREPred]->table.insert(std::make_pair(ValNo, PREInstr));
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002212
Owen Anderson6a903bc2008-06-18 21:41:49 +00002213 // Create a PHI to make the value available in this block.
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002214 PHINode* Phi = PHINode::Create(CurInst->getType(),
2215 CurInst->getName() + ".pre-phi",
Owen Anderson6a903bc2008-06-18 21:41:49 +00002216 CurrentBlock->begin());
2217 for (pred_iterator PI = pred_begin(CurrentBlock),
Gabor Greifd323f5e2010-07-09 14:48:08 +00002218 PE = pred_end(CurrentBlock); PI != PE; ++PI) {
2219 BasicBlock *P = *PI;
2220 Phi->addIncoming(predMap[P], P);
2221 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002222
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002223 VN.add(Phi, ValNo);
2224 localAvail[CurrentBlock]->table[ValNo] = Phi;
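      // The PHI is now the leader for ValNo in this block's scope, so later
      // lookups of ValNo here (and in dominated blocks) will find it.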
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002225
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002226 CurInst->replaceAllUsesWith(Phi);
Duncan Sands19d0b472010-02-16 11:11:14 +00002227 if (MD && Phi->getType()->isPointerTy())
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00002228 MD->invalidateCachedPointerInfo(Phi);
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002229 VN.erase(CurInst);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002230
David Greene2e6efc42010-01-05 01:27:17 +00002231 DEBUG(dbgs() << "GVN PRE removed: " << *CurInst << '\n');
Dan Gohman81132462009-11-14 02:27:51 +00002232 if (MD) MD->removeInstruction(CurInst);
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002233 CurInst->eraseFromParent();
Bill Wendlingebb6a542008-12-22 21:57:30 +00002234 DEBUG(verifyRemoved(CurInst));
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002235 Changed = true;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002236 }
2237 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002238
Bob Wilson92cdb6e2010-02-16 19:51:59 +00002239 if (splitCriticalEdges())
2240 Changed = true;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002241
Bob Wilson92cdb6e2010-02-16 19:51:59 +00002242 return Changed;
2243}
2244
2245/// splitCriticalEdges - Split critical edges found during the previous
2246/// iteration that may enable further optimization.
2247bool GVN::splitCriticalEdges() {
2248 if (toSplit.empty())
2249 return false;
2250 do {
2251 std::pair<TerminatorInst*, unsigned> Edge = toSplit.pop_back_val();
2252 SplitCriticalEdge(Edge.first, Edge.second, this);
2253 } while (!toSplit.empty());
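  // Splitting edges inserts new blocks and rewrites predecessor lists, so any
  // predecessor information cached by memdep is now stale.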
Evan Cheng7263cf8432010-03-01 22:23:12 +00002254 if (MD) MD->invalidateCachedPredecessors();
Bob Wilson92cdb6e2010-02-16 19:51:59 +00002255 return true;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002256}
2257
Bill Wendling456e8852008-12-22 22:32:22 +00002258/// iterateOnFunction - Executes one iteration of GVN.
Owen Anderson676070d2007-08-14 18:04:11 +00002259bool GVN::iterateOnFunction(Function &F) {
Nuno Lopese3127f32008-10-10 16:25:50 +00002260 cleanupGlobalSets();
Chris Lattnerbeb216d2008-03-21 21:33:23 +00002261
Owen Anderson98f912b2009-04-01 23:53:49 +00002262 for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
2263 DE = df_end(DT->getRootNode()); DI != DE; ++DI) {
2264 if (DI->getIDom())
2265 localAvail[DI->getBlock()] =
2266 new ValueNumberScope(localAvail[DI->getIDom()->getBlock()]);
2267 else
2268 localAvail[DI->getBlock()] = new ValueNumberScope(0);
2269 }
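  // Every block now has a scope chained to its immediate dominator's scope
  // (the root's parent is null), so value lookups can walk up the dominator
  // tree.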
2270
Owen Andersonab6ec2e2007-07-24 17:55:58 +00002271 // Top-down walk of the dominator tree
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002272 bool Changed = false;
Owen Anderson03aacba2008-12-15 03:52:17 +00002273#if 0
2274 // Needed for value numbering with phi construction to work.
Owen Andersonbfe133e2008-12-15 02:03:00 +00002275 ReversePostOrderTraversal<Function*> RPOT(&F);
2276 for (ReversePostOrderTraversal<Function*>::rpo_iterator RI = RPOT.begin(),
2277 RE = RPOT.end(); RI != RE; ++RI)
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002278 Changed |= processBlock(*RI);
Owen Anderson03aacba2008-12-15 03:52:17 +00002279#else
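  // Preorder walk of the dominator tree: dominators are visited before the
  // blocks they dominate, so each block's parent scopes are populated before
  // they are queried.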
2280 for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
2281 DE = df_end(DT->getRootNode()); DI != DE; ++DI)
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002282 Changed |= processBlock(DI->getBlock());
Owen Anderson03aacba2008-12-15 03:52:17 +00002283#endif
2284
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002285 return Changed;
Owen Andersonab6ec2e2007-07-24 17:55:58 +00002286}
Nuno Lopese3127f32008-10-10 16:25:50 +00002287
2288void GVN::cleanupGlobalSets() {
2289 VN.clear();
Nuno Lopese3127f32008-10-10 16:25:50 +00002290
2291 for (DenseMap<BasicBlock*, ValueNumberScope*>::iterator
2292 I = localAvail.begin(), E = localAvail.end(); I != E; ++I)
2293 delete I->second;
2294 localAvail.clear();
2295}
Bill Wendling6b18a392008-12-22 21:36:08 +00002296
2297/// verifyRemoved - Verify that the specified instruction does not occur in our
2298/// internal data structures.
Bill Wendlinge7f08e72008-12-22 22:28:56 +00002299void GVN::verifyRemoved(const Instruction *Inst) const {
2300 VN.verifyRemoved(Inst);
Bill Wendling3c793442008-12-22 22:14:07 +00002301
Bill Wendlinge7f08e72008-12-22 22:28:56 +00002302  // Walk through the value number scopes to make sure the instruction isn't
2303  // ferreted away in any of them.
Jeffrey Yasskinb40d3f72009-11-10 01:02:17 +00002304 for (DenseMap<BasicBlock*, ValueNumberScope*>::const_iterator
Bill Wendlinge7f08e72008-12-22 22:28:56 +00002305 I = localAvail.begin(), E = localAvail.end(); I != E; ++I) {
2306 const ValueNumberScope *VNS = I->second;
2307
2308 while (VNS) {
Jeffrey Yasskinb40d3f72009-11-10 01:02:17 +00002309 for (DenseMap<uint32_t, Value*>::const_iterator
Bill Wendlinge7f08e72008-12-22 22:28:56 +00002310 II = VNS->table.begin(), IE = VNS->table.end(); II != IE; ++II) {
2311 assert(II->second != Inst && "Inst still in value numbering scope!");
2312 }
2313
2314 VNS = VNS->parent;
Bill Wendling3c793442008-12-22 22:14:07 +00002315 }
2316 }
Bill Wendling6b18a392008-12-22 21:36:08 +00002317}