//===- GVN.cpp - Eliminate redundant values and loads ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs global value numbering to eliminate fully redundant
// instructions. It also performs simple dead load elimination.
//
// Note that this pass does the value numbering itself; it does not use the
// ValueNumbering analysis passes.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "gvn"
#include "llvm/Transforms/Scalar.h"
#include "llvm/BasicBlock.h"
#include "llvm/Constants.h"
#include "llvm/DerivedTypes.h"
#include "llvm/GlobalVariable.h"
#include "llvm/Function.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/LLVMContext.h"
#include "llvm/Operator.h"
#include "llvm/Value.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/Loads.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Analysis/PHITransAddr.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/GetElementPtrTypeIterator.h"
#include "llvm/Support/IRBuilder.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/SSAUpdater.h"
using namespace llvm;

STATISTIC(NumGVNInstr,  "Number of instructions deleted");
STATISTIC(NumGVNLoad,   "Number of loads deleted");
STATISTIC(NumGVNPRE,    "Number of instructions PRE'd");
STATISTIC(NumGVNBlocks, "Number of blocks merged");
STATISTIC(NumPRELoad,   "Number of loads PRE'd");

static cl::opt<bool> EnablePRE("enable-pre",
                               cl::init(true), cl::Hidden);
static cl::opt<bool> EnableLoadPRE("enable-load-pre", cl::init(true));
static cl::opt<bool> EnableFullLoadPRE("enable-full-load-pre", cl::init(false));

//===----------------------------------------------------------------------===//
//                         ValueTable Class
//===----------------------------------------------------------------------===//

/// This class holds the mapping between values and value numbers. It is used
/// as an efficient mechanism to determine the expression-wise equivalence of
/// two values.
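///
/// For instance (an illustrative snippet, not IR from this file), value
/// numbering assigns the same number to both additions below, which lets GVN
/// replace all uses of %y with %x and delete the second add:
///
///   %x = add i32 %a, %b
///   %y = add i32 %a, %b   ; same opcode, type, and operand value numbers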
namespace {
  struct Expression {
    enum ExpressionOpcode {
      ADD = Instruction::Add,
      FADD = Instruction::FAdd,
      SUB = Instruction::Sub,
      FSUB = Instruction::FSub,
      MUL = Instruction::Mul,
      FMUL = Instruction::FMul,
      UDIV = Instruction::UDiv,
      SDIV = Instruction::SDiv,
      FDIV = Instruction::FDiv,
      UREM = Instruction::URem,
      SREM = Instruction::SRem,
      FREM = Instruction::FRem,
      SHL = Instruction::Shl,
      LSHR = Instruction::LShr,
      ASHR = Instruction::AShr,
      AND = Instruction::And,
      OR = Instruction::Or,
      XOR = Instruction::Xor,
      TRUNC = Instruction::Trunc,
      ZEXT = Instruction::ZExt,
      SEXT = Instruction::SExt,
      FPTOUI = Instruction::FPToUI,
      FPTOSI = Instruction::FPToSI,
      UITOFP = Instruction::UIToFP,
      SITOFP = Instruction::SIToFP,
      FPTRUNC = Instruction::FPTrunc,
      FPEXT = Instruction::FPExt,
      PTRTOINT = Instruction::PtrToInt,
      INTTOPTR = Instruction::IntToPtr,
      BITCAST = Instruction::BitCast,
      ICMPEQ, ICMPNE, ICMPUGT, ICMPUGE, ICMPULT, ICMPULE,
      ICMPSGT, ICMPSGE, ICMPSLT, ICMPSLE, FCMPOEQ,
      FCMPOGT, FCMPOGE, FCMPOLT, FCMPOLE, FCMPONE,
      FCMPORD, FCMPUNO, FCMPUEQ, FCMPUGT, FCMPUGE,
      FCMPULT, FCMPULE, FCMPUNE, EXTRACT, INSERT,
      SHUFFLE, SELECT, GEP, CALL, CONSTANT,
      INSERTVALUE, EXTRACTVALUE, EMPTY, TOMBSTONE };

    ExpressionOpcode opcode;
    const Type* type;
    SmallVector<uint32_t, 4> varargs;
    Value *function;

    Expression() { }
    Expression(ExpressionOpcode o) : opcode(o) { }

    bool operator==(const Expression &other) const {
      if (opcode != other.opcode)
        return false;
      else if (opcode == EMPTY || opcode == TOMBSTONE)
        return true;
      else if (type != other.type)
        return false;
      else if (function != other.function)
        return false;
      else {
        if (varargs.size() != other.varargs.size())
          return false;

        for (size_t i = 0; i < varargs.size(); ++i)
          if (varargs[i] != other.varargs[i])
            return false;

        return true;
      }
    }

    bool operator!=(const Expression &other) const {
      return !(*this == other);
    }
  };

  class ValueTable {
    private:
      DenseMap<Value*, uint32_t> valueNumbering;
      DenseMap<Expression, uint32_t> expressionNumbering;
      AliasAnalysis* AA;
      MemoryDependenceAnalysis* MD;
      DominatorTree* DT;

      uint32_t nextValueNumber;

      Expression::ExpressionOpcode getOpcode(CmpInst* C);
      Expression create_expression(BinaryOperator* BO);
      Expression create_expression(CmpInst* C);
      Expression create_expression(ShuffleVectorInst* V);
      Expression create_expression(ExtractElementInst* C);
      Expression create_expression(InsertElementInst* V);
      Expression create_expression(SelectInst* V);
      Expression create_expression(CastInst* C);
      Expression create_expression(GetElementPtrInst* G);
      Expression create_expression(CallInst* C);
      Expression create_expression(Constant* C);
      Expression create_expression(ExtractValueInst* C);
      Expression create_expression(InsertValueInst* C);

      uint32_t lookup_or_add_call(CallInst* C);
    public:
      ValueTable() : nextValueNumber(1) { }
      uint32_t lookup_or_add(Value *V);
      uint32_t lookup(Value *V) const;
      void add(Value *V, uint32_t num);
      void clear();
      void erase(Value *v);
      unsigned size();
      void setAliasAnalysis(AliasAnalysis* A) { AA = A; }
      AliasAnalysis *getAliasAnalysis() const { return AA; }
      void setMemDep(MemoryDependenceAnalysis* M) { MD = M; }
      void setDomTree(DominatorTree* D) { DT = D; }
      uint32_t getNextUnusedValueNumber() { return nextValueNumber; }
      void verifyRemoved(const Value *) const;
  };
}

namespace llvm {
template <> struct DenseMapInfo<Expression> {
  static inline Expression getEmptyKey() {
    return Expression(Expression::EMPTY);
  }

  static inline Expression getTombstoneKey() {
    return Expression(Expression::TOMBSTONE);
  }

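  // The hash below is a simple mix: it folds the Type* and callee pointer down
  // from pointer width by xoring two shifted copies of each, and accumulates
  // every operand value number with the usual "hash = elt + hash * 37" step.
  // Collisions are acceptable; operator== makes the final equality decision.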
  static unsigned getHashValue(const Expression e) {
    unsigned hash = e.opcode;

    hash = ((unsigned)((uintptr_t)e.type >> 4) ^
            (unsigned)((uintptr_t)e.type >> 9));

    for (SmallVector<uint32_t, 4>::const_iterator I = e.varargs.begin(),
         E = e.varargs.end(); I != E; ++I)
      hash = *I + hash * 37;

    hash = ((unsigned)((uintptr_t)e.function >> 4) ^
            (unsigned)((uintptr_t)e.function >> 9)) +
           hash * 37;

    return hash;
  }
  static bool isEqual(const Expression &LHS, const Expression &RHS) {
    return LHS == RHS;
  }
};

template <>
struct isPodLike<Expression> { static const bool value = true; };

}

//===----------------------------------------------------------------------===//
//                     ValueTable Internal Functions
//===----------------------------------------------------------------------===//

Expression::ExpressionOpcode ValueTable::getOpcode(CmpInst* C) {
  if (isa<ICmpInst>(C)) {
    switch (C->getPredicate()) {
    default:  // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case ICmpInst::ICMP_EQ:  return Expression::ICMPEQ;
    case ICmpInst::ICMP_NE:  return Expression::ICMPNE;
    case ICmpInst::ICMP_UGT: return Expression::ICMPUGT;
    case ICmpInst::ICMP_UGE: return Expression::ICMPUGE;
    case ICmpInst::ICMP_ULT: return Expression::ICMPULT;
    case ICmpInst::ICMP_ULE: return Expression::ICMPULE;
    case ICmpInst::ICMP_SGT: return Expression::ICMPSGT;
    case ICmpInst::ICMP_SGE: return Expression::ICMPSGE;
    case ICmpInst::ICMP_SLT: return Expression::ICMPSLT;
    case ICmpInst::ICMP_SLE: return Expression::ICMPSLE;
    }
  } else {
    switch (C->getPredicate()) {
    default: // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case FCmpInst::FCMP_OEQ: return Expression::FCMPOEQ;
    case FCmpInst::FCMP_OGT: return Expression::FCMPOGT;
    case FCmpInst::FCMP_OGE: return Expression::FCMPOGE;
    case FCmpInst::FCMP_OLT: return Expression::FCMPOLT;
    case FCmpInst::FCMP_OLE: return Expression::FCMPOLE;
    case FCmpInst::FCMP_ONE: return Expression::FCMPONE;
    case FCmpInst::FCMP_ORD: return Expression::FCMPORD;
    case FCmpInst::FCMP_UNO: return Expression::FCMPUNO;
    case FCmpInst::FCMP_UEQ: return Expression::FCMPUEQ;
    case FCmpInst::FCMP_UGT: return Expression::FCMPUGT;
    case FCmpInst::FCMP_UGE: return Expression::FCMPUGE;
    case FCmpInst::FCMP_ULT: return Expression::FCMPULT;
    case FCmpInst::FCMP_ULE: return Expression::FCMPULE;
    case FCmpInst::FCMP_UNE: return Expression::FCMPUNE;
    }
  }
}

Expression ValueTable::create_expression(CallInst* C) {
  Expression e;

  e.type = C->getType();
  e.function = C->getCalledFunction();
  e.opcode = Expression::CALL;

  CallSite CS(C);
  for (CallInst::op_iterator I = CS.arg_begin(), E = CS.arg_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(BinaryOperator* BO) {
  Expression e;
  e.varargs.push_back(lookup_or_add(BO->getOperand(0)));
  e.varargs.push_back(lookup_or_add(BO->getOperand(1)));
  e.function = 0;
  e.type = BO->getType();
  e.opcode = static_cast<Expression::ExpressionOpcode>(BO->getOpcode());

  return e;
}

Expression ValueTable::create_expression(CmpInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.varargs.push_back(lookup_or_add(C->getOperand(1)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = getOpcode(C);

  return e;
}

Expression ValueTable::create_expression(CastInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = static_cast<Expression::ExpressionOpcode>(C->getOpcode());

  return e;
}

Expression ValueTable::create_expression(ShuffleVectorInst* S) {
  Expression e;

  e.varargs.push_back(lookup_or_add(S->getOperand(0)));
  e.varargs.push_back(lookup_or_add(S->getOperand(1)));
  e.varargs.push_back(lookup_or_add(S->getOperand(2)));
  e.function = 0;
  e.type = S->getType();
  e.opcode = Expression::SHUFFLE;

  return e;
}

Expression ValueTable::create_expression(ExtractElementInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getOperand(0)));
  e.varargs.push_back(lookup_or_add(E->getOperand(1)));
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACT;

  return e;
}

Expression ValueTable::create_expression(InsertElementInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getOperand(0)));
  e.varargs.push_back(lookup_or_add(I->getOperand(1)));
  e.varargs.push_back(lookup_or_add(I->getOperand(2)));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::INSERT;

  return e;
}

Expression ValueTable::create_expression(SelectInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getCondition()));
  e.varargs.push_back(lookup_or_add(I->getTrueValue()));
  e.varargs.push_back(lookup_or_add(I->getFalseValue()));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::SELECT;

  return e;
}

Expression ValueTable::create_expression(GetElementPtrInst* G) {
  Expression e;

  e.varargs.push_back(lookup_or_add(G->getPointerOperand()));
  e.function = 0;
  e.type = G->getType();
  e.opcode = Expression::GEP;

  for (GetElementPtrInst::op_iterator I = G->idx_begin(), E = G->idx_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(ExtractValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  for (ExtractValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACTVALUE;

  return e;
}

Expression ValueTable::create_expression(InsertValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  e.varargs.push_back(lookup_or_add(E->getInsertedValueOperand()));
  for (InsertValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::INSERTVALUE;

  return e;
}

//===----------------------------------------------------------------------===//
//                     ValueTable External Functions
//===----------------------------------------------------------------------===//

/// add - Insert a value into the table with a specified value number.
void ValueTable::add(Value *V, uint32_t num) {
  valueNumbering.insert(std::make_pair(V, num));
}

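// A quick sketch of what the call numbering below buys us (illustrative IR,
// not taken from this file): given a callee that only reads memory and no
// clobbering write between the two calls, both calls receive the same value
// number and the second one becomes redundant:
//
//   %a = call i32 @f(i32 %x)   ; @f only reads memory
//   %b = call i32 @f(i32 %x)   ; same callee, same argument numbers, no clobber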
uint32_t ValueTable::lookup_or_add_call(CallInst* C) {
  if (AA->doesNotAccessMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) e = nextValueNumber++;
    valueNumbering[C] = e;
    return e;
  } else if (AA->onlyReadsMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }
    if (!MD) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }

    MemDepResult local_dep = MD->getDependency(C);

    if (!local_dep.isDef() && !local_dep.isNonLocal()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (local_dep.isDef()) {
      CallInst* local_cdep = cast<CallInst>(local_dep.getInst());

      if (local_cdep->getNumArgOperands() != C->getNumArgOperands()) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }

      for (unsigned i = 0, e = C->getNumArgOperands(); i < e; ++i) {
        uint32_t c_vn = lookup_or_add(C->getArgOperand(i));
        uint32_t cd_vn = lookup_or_add(local_cdep->getArgOperand(i));
        if (c_vn != cd_vn) {
          valueNumbering[C] = nextValueNumber;
          return nextValueNumber++;
        }
      }

      uint32_t v = lookup_or_add(local_cdep);
      valueNumbering[C] = v;
      return v;
    }

    // Non-local case.
    const MemoryDependenceAnalysis::NonLocalDepInfo &deps =
      MD->getNonLocalCallDependency(CallSite(C));
    // FIXME: call/call dependencies for readonly calls should return def, not
    // clobber! Move the checking logic to MemDep!
    CallInst* cdep = 0;

    // Check to see if we have a single dominating call instruction that is
    // identical to C.
    for (unsigned i = 0, e = deps.size(); i != e; ++i) {
      const NonLocalDepEntry *I = &deps[i];
      // Ignore non-local dependencies.
      if (I->getResult().isNonLocal())
        continue;

      // We don't handle non-dependencies. If we already have a call, reject
      // instruction dependencies.
      if (I->getResult().isClobber() || cdep != 0) {
        cdep = 0;
        break;
      }

      CallInst *NonLocalDepCall = dyn_cast<CallInst>(I->getResult().getInst());
      // FIXME: All duplicated with non-local case.
      if (NonLocalDepCall && DT->properlyDominates(I->getBB(), C->getParent())){
        cdep = NonLocalDepCall;
        continue;
      }

      cdep = 0;
      break;
    }

    if (!cdep) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (cdep->getNumArgOperands() != C->getNumArgOperands()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }
    for (unsigned i = 0, e = C->getNumArgOperands(); i < e; ++i) {
      uint32_t c_vn = lookup_or_add(C->getArgOperand(i));
      uint32_t cd_vn = lookup_or_add(cdep->getArgOperand(i));
      if (c_vn != cd_vn) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }
    }

    uint32_t v = lookup_or_add(cdep);
    valueNumbering[C] = v;
    return v;

  } else {
    valueNumbering[C] = nextValueNumber;
    return nextValueNumber++;
  }
}

/// lookup_or_add - Returns the value number for the specified value, assigning
/// it a new number if it did not have one before.
uint32_t ValueTable::lookup_or_add(Value *V) {
  DenseMap<Value*, uint32_t>::iterator VI = valueNumbering.find(V);
  if (VI != valueNumbering.end())
    return VI->second;

  if (!isa<Instruction>(V)) {
    valueNumbering[V] = nextValueNumber;
    return nextValueNumber++;
  }

  Instruction* I = cast<Instruction>(V);
  Expression exp;
  switch (I->getOpcode()) {
    case Instruction::Call:
      return lookup_or_add_call(cast<CallInst>(I));
    case Instruction::Add:
    case Instruction::FAdd:
    case Instruction::Sub:
    case Instruction::FSub:
    case Instruction::Mul:
    case Instruction::FMul:
    case Instruction::UDiv:
    case Instruction::SDiv:
    case Instruction::FDiv:
    case Instruction::URem:
    case Instruction::SRem:
    case Instruction::FRem:
    case Instruction::Shl:
    case Instruction::LShr:
    case Instruction::AShr:
    case Instruction::And:
    case Instruction::Or :
    case Instruction::Xor:
      exp = create_expression(cast<BinaryOperator>(I));
      break;
    case Instruction::ICmp:
    case Instruction::FCmp:
      exp = create_expression(cast<CmpInst>(I));
      break;
    case Instruction::Trunc:
    case Instruction::ZExt:
    case Instruction::SExt:
    case Instruction::FPToUI:
    case Instruction::FPToSI:
    case Instruction::UIToFP:
    case Instruction::SIToFP:
    case Instruction::FPTrunc:
    case Instruction::FPExt:
    case Instruction::PtrToInt:
    case Instruction::IntToPtr:
    case Instruction::BitCast:
      exp = create_expression(cast<CastInst>(I));
      break;
    case Instruction::Select:
      exp = create_expression(cast<SelectInst>(I));
      break;
    case Instruction::ExtractElement:
      exp = create_expression(cast<ExtractElementInst>(I));
      break;
    case Instruction::InsertElement:
      exp = create_expression(cast<InsertElementInst>(I));
      break;
    case Instruction::ShuffleVector:
      exp = create_expression(cast<ShuffleVectorInst>(I));
      break;
    case Instruction::ExtractValue:
      exp = create_expression(cast<ExtractValueInst>(I));
      break;
    case Instruction::InsertValue:
      exp = create_expression(cast<InsertValueInst>(I));
      break;
    case Instruction::GetElementPtr:
      exp = create_expression(cast<GetElementPtrInst>(I));
      break;
    default:
      valueNumbering[V] = nextValueNumber;
      return nextValueNumber++;
  }

  uint32_t& e = expressionNumbering[exp];
  if (!e) e = nextValueNumber++;
  valueNumbering[V] = e;
  return e;
}

/// lookup - Returns the value number of the specified value. Fails if
/// the value has not yet been numbered.
uint32_t ValueTable::lookup(Value *V) const {
  DenseMap<Value*, uint32_t>::const_iterator VI = valueNumbering.find(V);
  assert(VI != valueNumbering.end() && "Value not numbered?");
  return VI->second;
}

/// clear - Remove all entries from the ValueTable
void ValueTable::clear() {
  valueNumbering.clear();
  expressionNumbering.clear();
  nextValueNumber = 1;
}

/// erase - Remove a value from the value numbering
void ValueTable::erase(Value *V) {
  valueNumbering.erase(V);
}

/// verifyRemoved - Verify that the value is removed from all internal data
/// structures.
void ValueTable::verifyRemoved(const Value *V) const {
  for (DenseMap<Value*, uint32_t>::const_iterator
         I = valueNumbering.begin(), E = valueNumbering.end(); I != E; ++I) {
    assert(I->first != V && "Inst still occurs in value numbering map!");
  }
}

//===----------------------------------------------------------------------===//
//                                GVN Pass
//===----------------------------------------------------------------------===//

namespace {
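  // ValueNumberScope is a simple scoped availability table: each basic block
  // gets a map from value number to a leader Value available there, chained
  // to its immediate dominator's scope through 'parent'. A lookup that misses
  // locally walks up the dominator tree via the parent pointers.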
  struct ValueNumberScope {
    ValueNumberScope* parent;
    DenseMap<uint32_t, Value*> table;

    ValueNumberScope(ValueNumberScope* p) : parent(p) { }
  };
}

namespace {

  class GVN : public FunctionPass {
    bool runOnFunction(Function &F);
  public:
    static char ID; // Pass identification, replacement for typeid
    explicit GVN(bool noloads = false)
      : FunctionPass(&ID), NoLoads(noloads), MD(0) { }

  private:
    bool NoLoads;
    MemoryDependenceAnalysis *MD;
    DominatorTree *DT;

    ValueTable VN;
    DenseMap<BasicBlock*, ValueNumberScope*> localAvail;

    // List of critical edges to be split between iterations.
    SmallVector<std::pair<TerminatorInst*, unsigned>, 4> toSplit;

    // This transformation requires dominator info.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<DominatorTree>();
      if (!NoLoads)
        AU.addRequired<MemoryDependenceAnalysis>();
      AU.addRequired<AliasAnalysis>();

      AU.addPreserved<DominatorTree>();
      AU.addPreserved<AliasAnalysis>();
    }

    // Helper functions
    // FIXME: eliminate or document these better
    bool processLoad(LoadInst* L,
                     SmallVectorImpl<Instruction*> &toErase);
    bool processInstruction(Instruction *I,
                            SmallVectorImpl<Instruction*> &toErase);
    bool processNonLocalLoad(LoadInst* L,
                             SmallVectorImpl<Instruction*> &toErase);
    bool processBlock(BasicBlock *BB);
    void dump(DenseMap<uint32_t, Value*>& d);
    bool iterateOnFunction(Function &F);
    Value *CollapsePhi(PHINode* p);
    bool performPRE(Function& F);
    Value *lookupNumber(BasicBlock *BB, uint32_t num);
    void cleanupGlobalSets();
    void verifyRemoved(const Instruction *I) const;
    bool splitCriticalEdges();
  };

  char GVN::ID = 0;
}

// createGVNPass - The public interface to this file...
FunctionPass *llvm::createGVNPass(bool NoLoads) {
  return new GVN(NoLoads);
}

static RegisterPass<GVN> X("gvn",
                           "Global Value Numbering");

void GVN::dump(DenseMap<uint32_t, Value*>& d) {
  errs() << "{\n";
  for (DenseMap<uint32_t, Value*>::iterator I = d.begin(),
       E = d.end(); I != E; ++I) {
    errs() << I->first << "\n";
    I->second->dump();
  }
  errs() << "}\n";
}

static bool isSafeReplacement(PHINode* p, Instruction *inst) {
  if (!isa<PHINode>(inst))
    return true;

  for (Instruction::use_iterator UI = p->use_begin(), E = p->use_end();
       UI != E; ++UI)
    if (PHINode* use_phi = dyn_cast<PHINode>(UI))
      if (use_phi->getParent() == inst->getParent())
        return false;

  return true;
}

Value *GVN::CollapsePhi(PHINode *PN) {
  Value *ConstVal = PN->hasConstantValue(DT);
  if (!ConstVal) return 0;

  Instruction *Inst = dyn_cast<Instruction>(ConstVal);
  if (!Inst)
    return ConstVal;

  if (DT->dominates(Inst, PN))
    if (isSafeReplacement(PN, Inst))
      return Inst;
  return 0;
}

/// IsValueFullyAvailableInBlock - Return true if we can prove that the value
/// we're analyzing is fully available in the specified block. As we go, keep
/// track of which blocks we know are fully alive in FullyAvailableBlocks. This
/// map is actually a tri-state map with the following values:
///   0) we know the block *is not* fully available.
///   1) we know the block *is* fully available.
///   2) we do not know whether the block is fully available or not, but we are
///      currently speculating that it will be.
///   3) we are speculating for this block and have used that to speculate for
///      other blocks.
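///
/// For example (a hypothetical CFG, not taken from a test): if block C has
/// predecessors A and B, the value is known available in A, and B's only
/// predecessor is C itself, the walk first marks C as 2 (speculating), finds
/// A available, then revisits C via B and upgrades it to 3. If B had instead
/// reached an unavailable block, SpeculationFailure would have to flip C and
/// everything marked available because of C back to 0.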
static bool IsValueFullyAvailableInBlock(BasicBlock *BB,
                            DenseMap<BasicBlock*, char> &FullyAvailableBlocks) {
  // Optimistically assume that the block is fully available and check to see
  // if we already know about this block in one lookup.
  std::pair<DenseMap<BasicBlock*, char>::iterator, char> IV =
    FullyAvailableBlocks.insert(std::make_pair(BB, 2));

  // If the entry already existed for this block, return the precomputed value.
  if (!IV.second) {
    // If this is a speculative "available" value, mark it as being used for
    // speculation of other blocks.
    if (IV.first->second == 2)
      IV.first->second = 3;
    return IV.first->second != 0;
  }

  // Otherwise, see if it is fully available in all predecessors.
  pred_iterator PI = pred_begin(BB), PE = pred_end(BB);

  // If this block has no predecessors, it isn't live-in here.
  if (PI == PE)
    goto SpeculationFailure;

  for (; PI != PE; ++PI)
    // If the value isn't fully available in one of our predecessors, then it
    // isn't fully available in this block either. Undo our previous
    // optimistic assumption and bail out.
    if (!IsValueFullyAvailableInBlock(*PI, FullyAvailableBlocks))
      goto SpeculationFailure;

  return true;

// SpeculationFailure - If we get here, we found out that this is not, after
// all, a fully-available block. We have a problem if we speculated on this and
// used the speculation to mark other blocks as available.
SpeculationFailure:
  char &BBVal = FullyAvailableBlocks[BB];

  // If we didn't speculate on this, just return with it set to false.
  if (BBVal == 2) {
    BBVal = 0;
    return false;
  }

  // If we did speculate on this value, we could have blocks set to 1 that are
  // incorrect. Walk the (transitive) successors of this block and mark them as
  // 0 if set to one.
  SmallVector<BasicBlock*, 32> BBWorklist;
  BBWorklist.push_back(BB);

  do {
    BasicBlock *Entry = BBWorklist.pop_back_val();
    // Note that this sets blocks to 0 (unavailable) if they happen to not
    // already be in FullyAvailableBlocks. This is safe.
    char &EntryVal = FullyAvailableBlocks[Entry];
    if (EntryVal == 0) continue;  // Already unavailable.

    // Mark as unavailable.
    EntryVal = 0;

    for (succ_iterator I = succ_begin(Entry), E = succ_end(Entry); I != E; ++I)
      BBWorklist.push_back(*I);
  } while (!BBWorklist.empty());

  return false;
}


/// CanCoerceMustAliasedValueToLoad - Return true if
/// CoerceAvailableValueToLoadType will succeed.
static bool CanCoerceMustAliasedValueToLoad(Value *StoredVal,
                                            const Type *LoadTy,
                                            const TargetData &TD) {
  // If the loaded or stored value is a first-class array or struct, don't try
  // to transform it. We need to be able to bitcast to integer.
  if (LoadTy->isStructTy() || LoadTy->isArrayTy() ||
      StoredVal->getType()->isStructTy() ||
      StoredVal->getType()->isArrayTy())
    return false;

  // The store has to be at least as big as the load.
  if (TD.getTypeSizeInBits(StoredVal->getType()) <
        TD.getTypeSizeInBits(LoadTy))
    return false;

  return true;
}


/// CoerceAvailableValueToLoadType - If we saw a store of a value to memory, and
/// then a load from a must-aliased pointer of a different type, try to coerce
/// the stored value. LoadedTy is the type of the load we want to replace and
/// InsertPt is the place to insert new instructions.
///
/// If we can't do it, return null.
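///
/// For instance (an illustrative case, not from a test): a store of an i64
/// followed by a must-aliased load of an i32 can be satisfied by truncating
/// the stored value; on a little-endian target the i32 is simply the low 32
/// bits, on a big-endian target the value is shifted right by 32 bits first.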
static Value *CoerceAvailableValueToLoadType(Value *StoredVal,
                                             const Type *LoadedTy,
                                             Instruction *InsertPt,
                                             const TargetData &TD) {
  if (!CanCoerceMustAliasedValueToLoad(StoredVal, LoadedTy, TD))
    return 0;

  const Type *StoredValTy = StoredVal->getType();

  uint64_t StoreSize = TD.getTypeStoreSizeInBits(StoredValTy);
  uint64_t LoadSize = TD.getTypeSizeInBits(LoadedTy);

  // If the store and reload are the same size, we can always reuse it.
  if (StoreSize == LoadSize) {
    if (StoredValTy->isPointerTy() && LoadedTy->isPointerTy()) {
      // Pointer to Pointer -> use bitcast.
      return new BitCastInst(StoredVal, LoadedTy, "", InsertPt);
    }

    // Convert source pointers to integers, which can be bitcast.
    if (StoredValTy->isPointerTy()) {
      StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
      StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
    }

    const Type *TypeToCastTo = LoadedTy;
    if (TypeToCastTo->isPointerTy())
      TypeToCastTo = TD.getIntPtrType(StoredValTy->getContext());

    if (StoredValTy != TypeToCastTo)
      StoredVal = new BitCastInst(StoredVal, TypeToCastTo, "", InsertPt);

    // Cast to pointer if the load needs a pointer type.
    if (LoadedTy->isPointerTy())
      StoredVal = new IntToPtrInst(StoredVal, LoadedTy, "", InsertPt);

    return StoredVal;
  }

  // If the loaded value is smaller than the available value, then we can
  // extract out a piece from it. If the available value is too small, then we
  // can't do anything.
  assert(StoreSize >= LoadSize && "CanCoerceMustAliasedValueToLoad fail");

  // Convert source pointers to integers, which can be manipulated.
  if (StoredValTy->isPointerTy()) {
    StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
    StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
  }

  // Convert vectors and fp to integer, which can be manipulated.
  if (!StoredValTy->isIntegerTy()) {
    StoredValTy = IntegerType::get(StoredValTy->getContext(), StoreSize);
    StoredVal = new BitCastInst(StoredVal, StoredValTy, "", InsertPt);
  }

  // If this is a big-endian system, we need to shift the value down to the low
  // bits so that a truncate will work.
  if (TD.isBigEndian()) {
    Constant *Val = ConstantInt::get(StoredVal->getType(), StoreSize-LoadSize);
    StoredVal = BinaryOperator::CreateLShr(StoredVal, Val, "tmp", InsertPt);
  }

  // Truncate the integer to the right size now.
  const Type *NewIntTy = IntegerType::get(StoredValTy->getContext(), LoadSize);
  StoredVal = new TruncInst(StoredVal, NewIntTy, "trunc", InsertPt);

  if (LoadedTy == NewIntTy)
    return StoredVal;

  // If the result is a pointer, inttoptr.
  if (LoadedTy->isPointerTy())
    return new IntToPtrInst(StoredVal, LoadedTy, "inttoptr", InsertPt);

  // Otherwise, bitcast.
  return new BitCastInst(StoredVal, LoadedTy, "bitcast", InsertPt);
}

/// GetBaseWithConstantOffset - Analyze the specified pointer to see if it can
/// be expressed as a base pointer plus a constant offset. Return the base and
/// offset to the caller.
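///
/// For example (illustrative only): for %p = getelementptr {i32, i32}* %base,
/// i32 0, i32 1 on a target with 4-byte i32, this returns %base with Offset
/// incremented by 4; a bitcast of %p is looked through without changing the
/// offset.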
static Value *GetBaseWithConstantOffset(Value *Ptr, int64_t &Offset,
                                        const TargetData &TD) {
  Operator *PtrOp = dyn_cast<Operator>(Ptr);
  if (PtrOp == 0) return Ptr;

  // Just look through bitcasts.
  if (PtrOp->getOpcode() == Instruction::BitCast)
    return GetBaseWithConstantOffset(PtrOp->getOperand(0), Offset, TD);

  // If this is a GEP with constant indices, we can look through it.
  GEPOperator *GEP = dyn_cast<GEPOperator>(PtrOp);
  if (GEP == 0 || !GEP->hasAllConstantIndices()) return Ptr;

  gep_type_iterator GTI = gep_type_begin(GEP);
  for (User::op_iterator I = GEP->idx_begin(), E = GEP->idx_end(); I != E;
       ++I, ++GTI) {
    ConstantInt *OpC = cast<ConstantInt>(*I);
    if (OpC->isZero()) continue;

    // Handle struct and array indices, which add their offset to the pointer.
    if (const StructType *STy = dyn_cast<StructType>(*GTI)) {
      Offset += TD.getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
    } else {
      uint64_t Size = TD.getTypeAllocSize(GTI.getIndexedType());
      Offset += OpC->getSExtValue()*Size;
    }
  }

  // Re-sign extend from the pointer size if needed to get overflow edge cases
  // right.
  unsigned PtrSize = TD.getPointerSizeInBits();
  if (PtrSize < 64)
    Offset = (Offset << (64-PtrSize)) >> (64-PtrSize);

  return GetBaseWithConstantOffset(GEP->getPointerOperand(), Offset, TD);
}


/// AnalyzeLoadFromClobberingWrite - This function is called when we have a
/// memdep query of a load that ends up being a clobbering memory write (store,
/// memset, memcpy, memmove). This means that the write *may* provide bits used
/// by the load but we can't be sure because the pointers don't mustalias.
///
/// Check this case to see if there is anything more we can do before we give
/// up. This returns -1 if we have to give up, or a byte number in the stored
/// value of the piece that feeds the load.
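///
/// For instance (hypothetical numbers): an 8-byte write at offset 16 from the
/// common base fully covers a 4-byte load at offset 20, so this returns 4, the
/// byte offset of the loaded piece within the stored value; a 4-byte load at
/// offset 12 or 22 would instead return -1.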
static int AnalyzeLoadFromClobberingWrite(const Type *LoadTy, Value *LoadPtr,
                                          Value *WritePtr,
                                          uint64_t WriteSizeInBits,
                                          const TargetData &TD) {
  // If the loaded or stored value is a first-class array or struct, don't try
  // to transform it. We need to be able to bitcast to integer.
  if (LoadTy->isStructTy() || LoadTy->isArrayTy())
    return -1;

  int64_t StoreOffset = 0, LoadOffset = 0;
  Value *StoreBase = GetBaseWithConstantOffset(WritePtr, StoreOffset, TD);
  Value *LoadBase =
    GetBaseWithConstantOffset(LoadPtr, LoadOffset, TD);
  if (StoreBase != LoadBase)
    return -1;

  // If the load and store are to the exact same address, they should have been
  // a must alias. AA must have gotten confused.
  // FIXME: Study to see if/when this happens. One case is forwarding a memset
  // to a load from the base of the memset.
#if 0
  if (LoadOffset == StoreOffset) {
    dbgs() << "STORE/LOAD DEP WITH COMMON POINTER MISSED:\n"
           << "Base       = " << *StoreBase << "\n"
           << "Store Ptr  = " << *WritePtr << "\n"
           << "Store Offs = " << StoreOffset << "\n"
           << "Load Ptr   = " << *LoadPtr << "\n";
    abort();
  }
#endif

  // If the load and store don't overlap at all, the store doesn't provide
  // anything to the load. In this case, they really don't alias at all, AA
  // must have gotten confused.
  // FIXME: Investigate cases where this bails out, e.g. rdar://7238614. Then
  // remove this check, as it is duplicated with what we have below.
  uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy);

  if ((WriteSizeInBits & 7) | (LoadSize & 7))
    return -1;
  uint64_t StoreSize = WriteSizeInBits >> 3;  // Convert to bytes.
  LoadSize >>= 3;


  bool isAAFailure = false;
  if (StoreOffset < LoadOffset)
    isAAFailure = StoreOffset+int64_t(StoreSize) <= LoadOffset;
  else
    isAAFailure = LoadOffset+int64_t(LoadSize) <= StoreOffset;

  if (isAAFailure) {
#if 0
    dbgs() << "STORE LOAD DEP WITH COMMON BASE:\n"
           << "Base       = " << *StoreBase << "\n"
           << "Store Ptr  = " << *WritePtr << "\n"
           << "Store Offs = " << StoreOffset << "\n"
           << "Load Ptr   = " << *LoadPtr << "\n";
    abort();
#endif
    return -1;
  }

  // If the Load isn't completely contained within the stored bits, we don't
  // have all the bits to feed it. We could do something crazy in the future
  // (issue a smaller load then merge the bits in) but this seems unlikely to be
  // valuable.
  if (StoreOffset > LoadOffset ||
      StoreOffset+StoreSize < LoadOffset+LoadSize)
    return -1;

  // Okay, we can do this transformation. Return the number of bytes into the
  // store that the load is.
  return LoadOffset-StoreOffset;
}

/// AnalyzeLoadFromClobberingStore - This function is called when we have a
/// memdep query of a load that ends up being a clobbering store.
static int AnalyzeLoadFromClobberingStore(const Type *LoadTy, Value *LoadPtr,
                                          StoreInst *DepSI,
                                          const TargetData &TD) {
  // Cannot handle reading from store of first-class aggregate yet.
  if (DepSI->getOperand(0)->getType()->isStructTy() ||
      DepSI->getOperand(0)->getType()->isArrayTy())
    return -1;

  Value *StorePtr = DepSI->getPointerOperand();
  uint64_t StoreSize = TD.getTypeSizeInBits(DepSI->getOperand(0)->getType());
  return AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr,
                                        StorePtr, StoreSize, TD);
}

Chris Lattner07df9ef2009-12-09 07:37:07 +00001082static int AnalyzeLoadFromClobberingMemInst(const Type *LoadTy, Value *LoadPtr,
1083 MemIntrinsic *MI,
Chris Lattner42376062009-12-06 01:57:02 +00001084 const TargetData &TD) {
1085 // If the mem operation is a non-constant size, we can't handle it.
1086 ConstantInt *SizeCst = dyn_cast<ConstantInt>(MI->getLength());
1087 if (SizeCst == 0) return -1;
1088 uint64_t MemSizeInBits = SizeCst->getZExtValue()*8;
Chris Lattner778cb922009-12-06 05:29:56 +00001089
1090 // If this is memset, we just need to see if the offset is valid in the size
1091 // of the memset..
Chris Lattner42376062009-12-06 01:57:02 +00001092 if (MI->getIntrinsicID() == Intrinsic::memset)
Chris Lattner07df9ef2009-12-09 07:37:07 +00001093 return AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr, MI->getDest(),
1094 MemSizeInBits, TD);
Chris Lattner42376062009-12-06 01:57:02 +00001095
Chris Lattner778cb922009-12-06 05:29:56 +00001096 // If we have a memcpy/memmove, the only case we can handle is if this is a
1097 // copy from constant memory. In that case, we can read directly from the
1098 // constant memory.
1099 MemTransferInst *MTI = cast<MemTransferInst>(MI);
1100
1101 Constant *Src = dyn_cast<Constant>(MTI->getSource());
1102 if (Src == 0) return -1;
1103
1104 GlobalVariable *GV = dyn_cast<GlobalVariable>(Src->getUnderlyingObject());
1105 if (GV == 0 || !GV->isConstant()) return -1;
1106
1107 // See if the access is within the bounds of the transfer.
Chris Lattner07df9ef2009-12-09 07:37:07 +00001108 int Offset = AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr,
1109 MI->getDest(), MemSizeInBits, TD);
Chris Lattner778cb922009-12-06 05:29:56 +00001110 if (Offset == -1)
1111 return Offset;
1112
1113 // Otherwise, see if we can constant fold a load from the constant with the
1114 // offset applied as appropriate.
1115 Src = ConstantExpr::getBitCast(Src,
1116 llvm::Type::getInt8PtrTy(Src->getContext()));
1117 Constant *OffsetCst =
1118 ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
1119 Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
Chris Lattner07df9ef2009-12-09 07:37:07 +00001120 Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(LoadTy));
Chris Lattner778cb922009-12-06 05:29:56 +00001121 if (ConstantFoldLoadFromConstPtr(Src, &TD))
1122 return Offset;
Chris Lattner42376062009-12-06 01:57:02 +00001123 return -1;
1124}
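
// Illustrative note (not part of the pass): roughly, for
//   memset(%P, 0, 16)   followed by   load i32 from %P+4
// the four loaded bytes lie inside the sixteen stored bytes, so the analysis
// above returns offset 4. For memcpy/memmove the source must additionally be
// a constant global, so that ConstantFoldLoadFromConstPtr can fold the read
// at that offset; otherwise -1 is returned and no forwarding happens.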
1125
Chris Lattnerd28f9082009-09-21 06:24:16 +00001126
1127/// GetStoreValueForLoad - This function is called once we know that a store
1128/// completely covers the bits read by a load: SrcVal is the stored value and
1129/// the load reads LoadTy starting Offset bytes into it. Emit the shift,
1130/// truncate and coercion code before InsertPt and return a value of LoadTy
1131/// holding the loaded bits.
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001132static Value *GetStoreValueForLoad(Value *SrcVal, unsigned Offset,
1133 const Type *LoadTy,
1134 Instruction *InsertPt, const TargetData &TD){
Chris Lattnerd28f9082009-09-21 06:24:16 +00001135 LLVMContext &Ctx = SrcVal->getType()->getContext();
1136
Chris Lattner5a62d6e2010-05-08 20:01:44 +00001137 uint64_t StoreSize = (TD.getTypeSizeInBits(SrcVal->getType()) + 7) / 8;
1138 uint64_t LoadSize = (TD.getTypeSizeInBits(LoadTy) + 7) / 8;
Chris Lattnerd28f9082009-09-21 06:24:16 +00001139
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001140 IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
Chris Lattnerd28f9082009-09-21 06:24:16 +00001141
1142 // Compute which bits of the stored value are being used by the load. Convert
1143 // to an integer type to start with.
Duncan Sands19d0b472010-02-16 11:11:14 +00001144 if (SrcVal->getType()->isPointerTy())
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001145 SrcVal = Builder.CreatePtrToInt(SrcVal, TD.getIntPtrType(Ctx), "tmp");
Duncan Sands19d0b472010-02-16 11:11:14 +00001146 if (!SrcVal->getType()->isIntegerTy())
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001147 SrcVal = Builder.CreateBitCast(SrcVal, IntegerType::get(Ctx, StoreSize*8),
1148 "tmp");
Chris Lattnerd28f9082009-09-21 06:24:16 +00001149
1150 // Shift the bits to the least significant depending on endianness.
1151 unsigned ShiftAmt;
Chris Lattner42376062009-12-06 01:57:02 +00001152 if (TD.isLittleEndian())
Chris Lattnerd28f9082009-09-21 06:24:16 +00001153 ShiftAmt = Offset*8;
Chris Lattner42376062009-12-06 01:57:02 +00001154 else
Chris Lattner24705382009-09-21 17:55:47 +00001155 ShiftAmt = (StoreSize-LoadSize-Offset)*8;
Chris Lattnerd28f9082009-09-21 06:24:16 +00001156
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001157 if (ShiftAmt)
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001158 SrcVal = Builder.CreateLShr(SrcVal, ShiftAmt, "tmp");
Chris Lattnerd28f9082009-09-21 06:24:16 +00001159
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001160 if (LoadSize != StoreSize)
Chris Lattnerf8ba1252009-12-09 18:13:28 +00001161 SrcVal = Builder.CreateTrunc(SrcVal, IntegerType::get(Ctx, LoadSize*8),
1162 "tmp");
Chris Lattnerd28f9082009-09-21 06:24:16 +00001163
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001164 return CoerceAvailableValueToLoadType(SrcVal, LoadTy, InsertPt, TD);
Chris Lattnerd28f9082009-09-21 06:24:16 +00001165}
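
// Illustrative note (not part of the pass): a worked example of the
// extraction above, assuming a little-endian target. If the store wrote the
// i32 value 0xAABBCCDD and the clobber analysis said the load reads one byte
// at Offset 1, then ShiftAmt is 8, the lshr produces 0x00AABBCC, and the
// trunc to i8 yields 0xCC -- exactly the byte the load would have read from
// memory.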
1166
Chris Lattner42376062009-12-06 01:57:02 +00001167/// GetMemInstValueForLoad - This function is called when we have a
1168/// memdep query of a load that ends up being a clobbering mem intrinsic.
1169static Value *GetMemInstValueForLoad(MemIntrinsic *SrcInst, unsigned Offset,
1170 const Type *LoadTy, Instruction *InsertPt,
1171 const TargetData &TD){
1172 LLVMContext &Ctx = LoadTy->getContext();
1173 uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy)/8;
1174
1175 IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
1176
1177 // We know that this method is only called when the mem transfer fully
1178 // provides the bits for the load.
1179 if (MemSetInst *MSI = dyn_cast<MemSetInst>(SrcInst)) {
1180 // memset(P, 'x', 1234) -> splat('x'), even if x is a variable, and
1181 // independently of what the offset is.
1182 Value *Val = MSI->getValue();
1183 if (LoadSize != 1)
1184 Val = Builder.CreateZExt(Val, IntegerType::get(Ctx, LoadSize*8));
1185
1186 Value *OneElt = Val;
1187
1188 // Splat the value out to the right number of bits.
1189 for (unsigned NumBytesSet = 1; NumBytesSet != LoadSize; ) {
1190 // If we can double the number of bytes set, do it.
1191 if (NumBytesSet*2 <= LoadSize) {
1192 Value *ShVal = Builder.CreateShl(Val, NumBytesSet*8);
1193 Val = Builder.CreateOr(Val, ShVal);
1194 NumBytesSet <<= 1;
1195 continue;
1196 }
1197
1198 // Otherwise insert one byte at a time.
1199 Value *ShVal = Builder.CreateShl(Val, 1*8);
1200 Val = Builder.CreateOr(OneElt, ShVal);
1201 ++NumBytesSet;
1202 }
1203
1204 return CoerceAvailableValueToLoadType(Val, LoadTy, InsertPt, TD);
1205 }
Chris Lattner778cb922009-12-06 05:29:56 +00001206
1207 // Otherwise, this is a memcpy/memmove from a constant global.
1208 MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);
1209 Constant *Src = cast<Constant>(MTI->getSource());
1210
1211 // Otherwise, see if we can constant fold a load from the constant with the
1212 // offset applied as appropriate.
1213 Src = ConstantExpr::getBitCast(Src,
1214 llvm::Type::getInt8PtrTy(Src->getContext()));
1215 Constant *OffsetCst =
1216 ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
1217 Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
1218 Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(LoadTy));
1219 return ConstantFoldLoadFromConstPtr(Src, &TD);
Chris Lattner42376062009-12-06 01:57:02 +00001220}
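
// Illustrative note (not part of the pass): for a memset of byte 0x41 feeding
// a 4-byte load, the splat loop above zero-extends 0x41 to i32 and doubles
// the number of populated bytes each step:
//   0x00000041 | (0x00000041 << 8)  -> 0x00004141   (2 bytes set)
//   0x00004141 | (0x00004141 << 16) -> 0x41414141   (4 bytes set)
// which is the value the load would observe regardless of endianness, since
// every byte of a memset'd region holds the same value.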
1221
Dan Gohmanb29cda92010-04-15 17:08:50 +00001222namespace {
Chris Lattner42376062009-12-06 01:57:02 +00001223
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001224struct AvailableValueInBlock {
1225 /// BB - The basic block in question.
1226 BasicBlock *BB;
Chris Lattner93236ba2009-12-06 04:54:31 +00001227 enum ValType {
1228 SimpleVal, // A simple offsetted value that is accessed.
1229 MemIntrin // A memory intrinsic which is loaded from.
1230 };
1231
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001232 /// V - The value that is live out of the block.
Chris Lattner93236ba2009-12-06 04:54:31 +00001233 PointerIntPair<Value *, 1, ValType> Val;
1234
1235 /// Offset - The byte offset in Val that is interesting for the load query.
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001236 unsigned Offset;
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001237
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001238 static AvailableValueInBlock get(BasicBlock *BB, Value *V,
1239 unsigned Offset = 0) {
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001240 AvailableValueInBlock Res;
1241 Res.BB = BB;
Chris Lattner93236ba2009-12-06 04:54:31 +00001242 Res.Val.setPointer(V);
1243 Res.Val.setInt(SimpleVal);
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001244 Res.Offset = Offset;
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001245 return Res;
1246 }
Chris Lattner93236ba2009-12-06 04:54:31 +00001247
1248 static AvailableValueInBlock getMI(BasicBlock *BB, MemIntrinsic *MI,
1249 unsigned Offset = 0) {
1250 AvailableValueInBlock Res;
1251 Res.BB = BB;
1252 Res.Val.setPointer(MI);
1253 Res.Val.setInt(MemIntrin);
1254 Res.Offset = Offset;
1255 return Res;
1256 }
1257
1258 bool isSimpleValue() const { return Val.getInt() == SimpleVal; }
1259 Value *getSimpleValue() const {
1260 assert(isSimpleValue() && "Wrong accessor");
1261 return Val.getPointer();
1262 }
1263
1264 MemIntrinsic *getMemIntrinValue() const {
1265 assert(!isSimpleValue() && "Wrong accessor");
1266 return cast<MemIntrinsic>(Val.getPointer());
1267 }
Chris Lattner927b0ac2009-12-21 23:04:33 +00001268
1269 /// MaterializeAdjustedValue - Emit code into this block to adjust the value
1270 /// defined here to the specified type. This handles various coercion cases.
1271 Value *MaterializeAdjustedValue(const Type *LoadTy,
1272 const TargetData *TD) const {
1273 Value *Res;
1274 if (isSimpleValue()) {
1275 Res = getSimpleValue();
1276 if (Res->getType() != LoadTy) {
1277 assert(TD && "Need target data to handle type mismatch case");
1278 Res = GetStoreValueForLoad(Res, Offset, LoadTy, BB->getTerminator(),
1279 *TD);
1280
1281 DEBUG(errs() << "GVN COERCED NONLOCAL VAL:\nOffset: " << Offset << " "
1282 << *getSimpleValue() << '\n'
1283 << *Res << '\n' << "\n\n\n");
1284 }
1285 } else {
1286 Res = GetMemInstValueForLoad(getMemIntrinValue(), Offset,
1287 LoadTy, BB->getTerminator(), *TD);
1288 DEBUG(errs() << "GVN COERCED NONLOCAL MEM INTRIN:\nOffset: " << Offset
1289 << " " << *getMemIntrinValue() << '\n'
1290 << *Res << '\n' << "\n\n\n");
1291 }
1292 return Res;
1293 }
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001294};
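
// Illustrative note (not part of the pass): a typical use of the struct
// above, with hypothetical values.
//   AvailableValueInBlock::get(DepBB, StoredVal, 2)
// records "the load's bytes start at offset 2 of StoredVal when entering
// from DepBB"; MaterializeAdjustedValue later emits the extraction/coercion
// code at DepBB's terminator to produce a value of the load's type.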
1295
Dan Gohmanb29cda92010-04-15 17:08:50 +00001296}
1297
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001298/// ConstructSSAForLoadSet - Given a set of loads specified by ValuesPerBlock,
1299/// construct SSA form, allowing us to eliminate LI. This returns the value
1300/// that should be used at LI's definition site.
1301static Value *ConstructSSAForLoadSet(LoadInst *LI,
1302 SmallVectorImpl<AvailableValueInBlock> &ValuesPerBlock,
1303 const TargetData *TD,
Chris Lattnerbf200182009-12-21 23:15:48 +00001304 const DominatorTree &DT,
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001305 AliasAnalysis *AA) {
Chris Lattnerbf200182009-12-21 23:15:48 +00001306 // Check for the fully redundant, dominating load case. In this case, we can
1307 // just use the dominating value directly.
1308 if (ValuesPerBlock.size() == 1 &&
1309 DT.properlyDominates(ValuesPerBlock[0].BB, LI->getParent()))
1310 return ValuesPerBlock[0].MaterializeAdjustedValue(LI->getType(), TD);
1311
1312 // Otherwise, we have to construct SSA form.
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001313 SmallVector<PHINode*, 8> NewPHIs;
1314 SSAUpdater SSAUpdate(&NewPHIs);
1315 SSAUpdate.Initialize(LI);
1316
1317 const Type *LoadTy = LI->getType();
1318
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001319 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
Chris Lattner93236ba2009-12-06 04:54:31 +00001320 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1321 BasicBlock *BB = AV.BB;
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001322
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001323 if (SSAUpdate.HasValueForBlock(BB))
1324 continue;
Chris Lattner93236ba2009-12-06 04:54:31 +00001325
Chris Lattner927b0ac2009-12-21 23:04:33 +00001326 SSAUpdate.AddAvailableValue(BB, AV.MaterializeAdjustedValue(LoadTy, TD));
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001327 }
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001328
1329 // Perform PHI construction.
1330 Value *V = SSAUpdate.GetValueInMiddleOfBlock(LI->getParent());
1331
1332 // If new PHI nodes were created, notify alias analysis.
Duncan Sands19d0b472010-02-16 11:11:14 +00001333 if (V->getType()->isPointerTy())
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001334 for (unsigned i = 0, e = NewPHIs.size(); i != e; ++i)
1335 AA->copyValue(LI, NewPHIs[i]);
1336
1337 return V;
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001338}
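
// Illustrative note (not part of the pass): a sketch of the SSA construction
// above, with hypothetical names. If the load in %merge sees the stored value
// %v1 when coming from %left and an earlier load %v2 when coming from %right,
// the SSAUpdater materializes
//   %merge:
//     %phi = phi i32 [ %v1, %left ], [ %v2, %right ]
// and the caller replaces the original load with %phi.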
1339
Gabor Greifce6dd882010-04-09 10:57:00 +00001340static bool isLifetimeStart(const Instruction *Inst) {
1341 if (const IntrinsicInst* II = dyn_cast<IntrinsicInst>(Inst))
Owen Andersonb9878ee2009-12-02 07:35:19 +00001342 return II->getIntrinsicID() == Intrinsic::lifetime_start;
Chris Lattnerc4680252009-12-02 06:44:58 +00001343 return false;
1344}
1345
Owen Anderson221a4362007-08-16 22:02:55 +00001346/// processNonLocalLoad - Attempt to eliminate a load whose dependencies are
1347/// non-local by performing PHI construction.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001348bool GVN::processNonLocalLoad(LoadInst *LI,
Chris Lattner804209d2008-03-21 22:01:16 +00001349 SmallVectorImpl<Instruction*> &toErase) {
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001350 // Find the non-local dependencies of the load.
Chris Lattner9b7d99e2009-12-22 04:25:02 +00001351 SmallVector<NonLocalDepResult, 64> Deps;
Chris Lattnerb6fc4b82008-12-09 19:25:07 +00001352 MD->getNonLocalPointerDependency(LI->getOperand(0), true, LI->getParent(),
1353 Deps);
David Greene2e6efc42010-01-05 01:27:17 +00001354 //DEBUG(dbgs() << "INVESTIGATING NONLOCAL LOAD: "
Dan Gohmanef3ef7f2009-07-31 20:24:18 +00001355 // << Deps.size() << *LI << '\n');
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001356
Owen Andersonb39e0de2008-08-26 22:07:42 +00001357 // If we had to process more than one hundred blocks to find the
1358 // dependencies, this load isn't worth worrying about. Optimizing
1359 // it will be too expensive.
Chris Lattnerb6fc4b82008-12-09 19:25:07 +00001360 if (Deps.size() > 100)
Owen Andersonb39e0de2008-08-26 22:07:42 +00001361 return false;
Chris Lattnerb6372932008-12-18 00:51:32 +00001362
1363 // If we had a phi translation failure, we'll have a single entry which is a
1364 // clobber in the current block. Reject this early.
Chris Lattner0c315472009-12-09 07:08:01 +00001365 if (Deps.size() == 1 && Deps[0].getResult().isClobber()) {
Torok Edwinba93ea72009-06-17 18:48:18 +00001366 DEBUG(
David Greene2e6efc42010-01-05 01:27:17 +00001367 dbgs() << "GVN: non-local load ";
1368 WriteAsOperand(dbgs(), LI);
1369 dbgs() << " is clobbered by " << *Deps[0].getResult().getInst() << '\n';
Torok Edwinba93ea72009-06-17 18:48:18 +00001370 );
Chris Lattnerb6372932008-12-18 00:51:32 +00001371 return false;
Torok Edwinba93ea72009-06-17 18:48:18 +00001372 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001373
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001374 // Filter out useless results (non-locals, etc.). Keep track of the blocks
1375 // where we have a value available in ValuesPerBlock, and also track whether we see
1376 // dependencies that produce an unknown value for the load (such as a call
1377 // that could potentially clobber the load).
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001378 SmallVector<AvailableValueInBlock, 16> ValuesPerBlock;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001379 SmallVector<BasicBlock*, 16> UnavailableBlocks;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001380
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001381 const TargetData *TD = 0;
1382
Chris Lattnerb6fc4b82008-12-09 19:25:07 +00001383 for (unsigned i = 0, e = Deps.size(); i != e; ++i) {
Chris Lattner0c315472009-12-09 07:08:01 +00001384 BasicBlock *DepBB = Deps[i].getBB();
1385 MemDepResult DepInfo = Deps[i].getResult();
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001386
Chris Lattner0e3d6332008-12-05 21:04:20 +00001387 if (DepInfo.isClobber()) {
Chris Lattnerca5f9cb2009-12-09 18:21:46 +00001388 // The address being loaded in this non-local block may not be the same as
1389 // the pointer operand of the load if PHI translation occurs. Make sure
1390 // to consider the right address.
1391 Value *Address = Deps[i].getAddress();
1392
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001393 // If the dependence is to a store that writes to a superset of the bits
1394 // read by the load, we can extract the bits we need for the load from the
1395 // stored value.
1396 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInfo.getInst())) {
1397 if (TD == 0)
1398 TD = getAnalysisIfAvailable<TargetData>();
Chris Lattnerca5f9cb2009-12-09 18:21:46 +00001399 if (TD && Address) {
1400 int Offset = AnalyzeLoadFromClobberingStore(LI->getType(), Address,
Chris Lattner07df9ef2009-12-09 07:37:07 +00001401 DepSI, *TD);
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001402 if (Offset != -1) {
1403 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1404 DepSI->getOperand(0),
1405 Offset));
1406 continue;
1407 }
1408 }
1409 }
Chris Lattner42376062009-12-06 01:57:02 +00001410
Chris Lattner42376062009-12-06 01:57:02 +00001411 // If the clobbering value is a memset/memcpy/memmove, see if we can
1412 // forward a value on from it.
Chris Lattner93236ba2009-12-06 04:54:31 +00001413 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(DepInfo.getInst())) {
Chris Lattner42376062009-12-06 01:57:02 +00001414 if (TD == 0)
1415 TD = getAnalysisIfAvailable<TargetData>();
Chris Lattnerca5f9cb2009-12-09 18:21:46 +00001416 if (TD && Address) {
1417 int Offset = AnalyzeLoadFromClobberingMemInst(LI->getType(), Address,
Chris Lattner07df9ef2009-12-09 07:37:07 +00001418 DepMI, *TD);
Chris Lattner93236ba2009-12-06 04:54:31 +00001419 if (Offset != -1) {
1420 ValuesPerBlock.push_back(AvailableValueInBlock::getMI(DepBB, DepMI,
1421 Offset));
1422 continue;
1423 }
Chris Lattner42376062009-12-06 01:57:02 +00001424 }
1425 }
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001426
Chris Lattner0e3d6332008-12-05 21:04:20 +00001427 UnavailableBlocks.push_back(DepBB);
1428 continue;
1429 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001430
Chris Lattner0e3d6332008-12-05 21:04:20 +00001431 Instruction *DepInst = DepInfo.getInst();
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001432
Chris Lattner0e3d6332008-12-05 21:04:20 +00001433 // Loading the allocation -> undef.
Chris Lattnerc4680252009-12-02 06:44:58 +00001434 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst) ||
Owen Andersonb9878ee2009-12-02 07:35:19 +00001435 // Loading immediately after lifetime begin -> undef.
1436 isLifetimeStart(DepInst)) {
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001437 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1438 UndefValue::get(LI->getType())));
Chris Lattner7e61daf2008-12-01 01:15:42 +00001439 continue;
1440 }
Owen Anderson2b2bd282009-10-28 07:05:35 +00001441
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001442 if (StoreInst *S = dyn_cast<StoreInst>(DepInst)) {
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001443 // Reject loads and stores that are to the same address but are of
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001444 // different types when the stored value cannot be coerced to the load's type.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001445 if (S->getOperand(0)->getType() != LI->getType()) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001446 if (TD == 0)
1447 TD = getAnalysisIfAvailable<TargetData>();
1448
1449 // If the stored value is larger than or equal to the loaded value, we can
1450 // reuse it.
Chris Lattner9045f232009-09-21 17:24:04 +00001451 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(S->getOperand(0),
1452 LI->getType(), *TD)) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001453 UnavailableBlocks.push_back(DepBB);
1454 continue;
1455 }
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001456 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001457
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001458 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1459 S->getOperand(0)));
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001460 continue;
1461 }
1462
1463 if (LoadInst *LD = dyn_cast<LoadInst>(DepInst)) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001464 // If the types mismatch and we can't handle it, reject reuse of the load.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001465 if (LD->getType() != LI->getType()) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001466 if (TD == 0)
1467 TD = getAnalysisIfAvailable<TargetData>();
1468
1469 // If the stored value is larger than or equal to the loaded value, we can
1470 // reuse it.
Chris Lattner9045f232009-09-21 17:24:04 +00001471 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(LD, LI->getType(),*TD)){
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001472 UnavailableBlocks.push_back(DepBB);
1473 continue;
1474 }
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001475 }
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001476 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB, LD));
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001477 continue;
Owen Anderson5e5599b2007-07-25 19:57:03 +00001478 }
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001479
1480 UnavailableBlocks.push_back(DepBB);
1481 continue;
Chris Lattner2876a642008-03-21 21:14:38 +00001482 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001483
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001484 // If we have no predecessors that produce a known value for this load, exit
1485 // early.
1486 if (ValuesPerBlock.empty()) return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001487
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001488 // If all of the instructions we depend on produce a known value for this
1489 // load, then it is fully redundant and we can use PHI insertion to compute
1490 // its value. Insert PHIs and remove the fully redundant value now.
1491 if (UnavailableBlocks.empty()) {
David Greene2e6efc42010-01-05 01:27:17 +00001492 DEBUG(dbgs() << "GVN REMOVING NONLOCAL LOAD: " << *LI << '\n');
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001493
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001494 // Perform PHI construction.
Chris Lattnerbf200182009-12-21 23:15:48 +00001495 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD, *DT,
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001496 VN.getAliasAnalysis());
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001497 LI->replaceAllUsesWith(V);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001498
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001499 if (isa<PHINode>(V))
1500 V->takeName(LI);
Duncan Sands19d0b472010-02-16 11:11:14 +00001501 if (V->getType()->isPointerTy())
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001502 MD->invalidateCachedPointerInfo(V);
Bob Wilson1da90412010-02-22 21:39:41 +00001503 VN.erase(LI);
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001504 toErase.push_back(LI);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001505 ++NumGVNLoad;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001506 return true;
1507 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001508
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001509 if (!EnablePRE || !EnableLoadPRE)
1510 return false;
1511
1512 // Okay, we have *some* definitions of the value. This means that the value
1513 // is available in some of our (transitive) predecessors. Let's think about
1514 // doing PRE of this load. This will involve inserting a new load into the
1515 // predecessor when it's not available. We could do this in general, but
1516 // prefer to not increase code size. As such, we only do this when we know
1517 // that we only have to insert *one* load (which means we're basically moving
1518 // the load, not inserting a new one).
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001519
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001520 SmallPtrSet<BasicBlock *, 4> Blockers;
1521 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1522 Blockers.insert(UnavailableBlocks[i]);
1523
1524 // Let's find the first basic block with more than one predecessor. Walk
1525 // backwards through predecessors if needed.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001526 BasicBlock *LoadBB = LI->getParent();
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001527 BasicBlock *TmpBB = LoadBB;
1528
1529 bool isSinglePred = false;
Dale Johannesen81b64632009-06-17 20:48:23 +00001530 bool allSingleSucc = true;
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001531 while (TmpBB->getSinglePredecessor()) {
1532 isSinglePred = true;
1533 TmpBB = TmpBB->getSinglePredecessor();
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001534 if (TmpBB == LoadBB) // Infinite (unreachable) loop.
1535 return false;
1536 if (Blockers.count(TmpBB))
1537 return false;
Dale Johannesen81b64632009-06-17 20:48:23 +00001538 if (TmpBB->getTerminator()->getNumSuccessors() != 1)
1539 allSingleSucc = false;
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001540 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001541
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001542 assert(TmpBB);
1543 LoadBB = TmpBB;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001544
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001545 // If the available-values set has LI itself in it, this means we have a loop where
1546 // at least one of the values is LI. Since this means that we won't be able
1547 // to eliminate LI even if we insert uses in the other predecessors, we will
1548 // end up increasing code size. Reject this by scanning for LI.
Bob Wilson0fd41582010-03-02 00:09:29 +00001549 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
1550 if (ValuesPerBlock[i].isSimpleValue() &&
1551 ValuesPerBlock[i].getSimpleValue() == LI) {
1552 // Skip cases where LI is the only definition, even for EnableFullLoadPRE.
1553 if (!EnableFullLoadPRE || e == 1)
Bob Wilsond517b522010-02-01 21:17:14 +00001554 return false;
Bob Wilson0fd41582010-03-02 00:09:29 +00001555 }
Bob Wilsond517b522010-02-01 21:17:14 +00001556 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001557
Chris Lattner93236ba2009-12-06 04:54:31 +00001558 // FIXME: It is extremely unclear what this loop is doing, other than
1559 // artificially restricting loadpre.
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001560 if (isSinglePred) {
1561 bool isHot = false;
Chris Lattner93236ba2009-12-06 04:54:31 +00001562 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
1563 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1564 if (AV.isSimpleValue())
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001565 // "Hot" Instruction is in some loop (because it dominates its dep.
1566 // instruction).
Chris Lattner93236ba2009-12-06 04:54:31 +00001567 if (Instruction *I = dyn_cast<Instruction>(AV.getSimpleValue()))
1568 if (DT->dominates(LI, I)) {
1569 isHot = true;
1570 break;
1571 }
1572 }
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001573
1574 // We are interested only in "hot" instructions. We don't want to do any
1575 // mis-optimizations here.
1576 if (!isHot)
1577 return false;
1578 }
1579
Bob Wilsond517b522010-02-01 21:17:14 +00001580 // Check to see how many predecessors have the loaded value fully
1581 // available.
1582 DenseMap<BasicBlock*, Value*> PredLoads;
Chris Lattnerd2a653a2008-12-05 07:49:08 +00001583 DenseMap<BasicBlock*, char> FullyAvailableBlocks;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001584 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i)
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001585 FullyAvailableBlocks[ValuesPerBlock[i].BB] = true;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001586 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1587 FullyAvailableBlocks[UnavailableBlocks[i]] = false;
1588
Bob Wilsona2fda8b2010-05-04 20:03:21 +00001589 SmallVector<std::pair<TerminatorInst*, unsigned>, 4> NeedToSplit;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001590 for (pred_iterator PI = pred_begin(LoadBB), E = pred_end(LoadBB);
1591 PI != E; ++PI) {
Bob Wilsond517b522010-02-01 21:17:14 +00001592 BasicBlock *Pred = *PI;
1593 if (IsValueFullyAvailableInBlock(Pred, FullyAvailableBlocks)) {
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001594 continue;
Bob Wilsond517b522010-02-01 21:17:14 +00001595 }
1596 PredLoads[Pred] = 0;
Bob Wilson92cdb6e2010-02-16 19:51:59 +00001597
Bob Wilsond517b522010-02-01 21:17:14 +00001598 if (Pred->getTerminator()->getNumSuccessors() != 1) {
Bob Wilson92cdb6e2010-02-16 19:51:59 +00001599 if (isa<IndirectBrInst>(Pred->getTerminator())) {
1600 DEBUG(dbgs() << "COULD NOT PRE LOAD BECAUSE OF INDBR CRITICAL EDGE '"
1601 << Pred->getName() << "': " << *LI << '\n');
1602 return false;
1603 }
Bob Wilsonaff96b22010-02-16 21:06:42 +00001604 unsigned SuccNum = GetSuccessorNumber(Pred, LoadBB);
Bob Wilsona2fda8b2010-05-04 20:03:21 +00001605 NeedToSplit.push_back(std::make_pair(Pred->getTerminator(), SuccNum));
Bob Wilsond517b522010-02-01 21:17:14 +00001606 }
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001607 }
Bob Wilsona2fda8b2010-05-04 20:03:21 +00001608 if (!NeedToSplit.empty()) {
Bob Wilson0c8b29b2010-05-05 20:44:15 +00001609 toSplit.append(NeedToSplit.begin(), NeedToSplit.end());
Bob Wilson892432b2010-03-01 23:37:32 +00001610 return false;
Bob Wilsona2fda8b2010-05-04 20:03:21 +00001611 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001612
Bob Wilsond517b522010-02-01 21:17:14 +00001613 // Decide whether PRE is profitable for this load.
1614 unsigned NumUnavailablePreds = PredLoads.size();
1615 assert(NumUnavailablePreds != 0 &&
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001616 "Fully available value should be eliminated above!");
Bob Wilsond517b522010-02-01 21:17:14 +00001617 if (!EnableFullLoadPRE) {
1618 // If this load is unavailable in multiple predecessors, reject it.
1619 // FIXME: If we could restructure the CFG, we could make a common pred with
1620 // all the preds that don't have an available LI and insert a new load into
1621 // that one block.
1622 if (NumUnavailablePreds != 1)
1623 return false;
Owen Anderson0cc1a762007-08-07 23:12:31 +00001624 }
Bob Wilsond517b522010-02-01 21:17:14 +00001625
1626 // Check if the load can safely be moved to all the unavailable predecessors.
1627 bool CanDoPRE = true;
Chris Lattner44da5bd2009-11-28 15:39:14 +00001628 SmallVector<Instruction*, 8> NewInsts;
Bob Wilsond517b522010-02-01 21:17:14 +00001629 for (DenseMap<BasicBlock*, Value*>::iterator I = PredLoads.begin(),
1630 E = PredLoads.end(); I != E; ++I) {
1631 BasicBlock *UnavailablePred = I->first;
1632
1633 // Do PHI translation to get its value in the predecessor if necessary. The
1634 // returned pointer (if non-null) is guaranteed to dominate UnavailablePred.
1635
1636 // If all preds have a single successor, then we know it is safe to insert
1637 // the load on the pred (?!?), so we can insert code to materialize the
1638 // pointer if it is not available.
1639 PHITransAddr Address(LI->getOperand(0), TD);
1640 Value *LoadPtr = 0;
1641 if (allSingleSucc) {
1642 LoadPtr = Address.PHITranslateWithInsertion(LoadBB, UnavailablePred,
1643 *DT, NewInsts);
1644 } else {
Daniel Dunbar693ea892010-02-24 08:48:04 +00001645 Address.PHITranslateValue(LoadBB, UnavailablePred, DT);
Bob Wilsond517b522010-02-01 21:17:14 +00001646 LoadPtr = Address.getAddr();
Bob Wilsond517b522010-02-01 21:17:14 +00001647 }
1648
1649 // If we couldn't find or insert a computation of this phi translated value,
1650 // we fail PRE.
1651 if (LoadPtr == 0) {
1652 DEBUG(dbgs() << "COULDN'T INSERT PHI TRANSLATED VALUE OF: "
1653 << *LI->getOperand(0) << "\n");
1654 CanDoPRE = false;
1655 break;
1656 }
1657
1658 // Make sure it is valid to move this load here. We have to watch out for:
1659 // @1 = getelementptr (i8* p, ...
1660 // test p and branch if == 0
1661 // load @1
1662 // It is valid to have the getelementptr before the test, even if p can be 0,
1663 // as getelementptr only does address arithmetic.
1664 // If we are not pushing the value through any multiple-successor blocks
1665 // we do not have this case. Otherwise, check that the load is safe to
1666 // put anywhere; this can be improved, but should be conservatively safe.
1667 if (!allSingleSucc &&
1668 // FIXME: REEVALUATE THIS.
1669 !isSafeToLoadUnconditionally(LoadPtr,
1670 UnavailablePred->getTerminator(),
1671 LI->getAlignment(), TD)) {
1672 CanDoPRE = false;
1673 break;
1674 }
1675
1676 I->second = LoadPtr;
Chris Lattner972e6d82009-12-09 01:59:31 +00001677 }
1678
Bob Wilsond517b522010-02-01 21:17:14 +00001679 if (!CanDoPRE) {
1680 while (!NewInsts.empty())
1681 NewInsts.pop_back_val()->eraseFromParent();
Dale Johannesen81b64632009-06-17 20:48:23 +00001682 return false;
Chris Lattner32140312009-11-28 16:08:18 +00001683 }
Dale Johannesen81b64632009-06-17 20:48:23 +00001684
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001685 // Okay, we can eliminate this load by inserting a reload in the predecessor
1686 // and using PHI construction to get the value in the other predecessors, do
1687 // it.
David Greene2e6efc42010-01-05 01:27:17 +00001688 DEBUG(dbgs() << "GVN REMOVING PRE LOAD: " << *LI << '\n');
Chris Lattner32140312009-11-28 16:08:18 +00001689 DEBUG(if (!NewInsts.empty())
David Greene2e6efc42010-01-05 01:27:17 +00001690 dbgs() << "INSERTED " << NewInsts.size() << " INSTS: "
Chris Lattner32140312009-11-28 16:08:18 +00001691 << *NewInsts.back() << '\n');
1692
Bob Wilsond517b522010-02-01 21:17:14 +00001693 // Assign value numbers to the new instructions.
1694 for (unsigned i = 0, e = NewInsts.size(); i != e; ++i) {
1695 // FIXME: We really _ought_ to insert these value numbers into their
1696 // parent's availability map. However, in doing so, we risk getting into
1697 // ordering issues. If a block hasn't been processed yet, we would be
1698 // marking a value as AVAIL-IN, which isn't what we intend.
1699 VN.lookup_or_add(NewInsts[i]);
1700 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001701
Bob Wilsond517b522010-02-01 21:17:14 +00001702 for (DenseMap<BasicBlock*, Value*>::iterator I = PredLoads.begin(),
1703 E = PredLoads.end(); I != E; ++I) {
1704 BasicBlock *UnavailablePred = I->first;
1705 Value *LoadPtr = I->second;
1706
1707 Value *NewLoad = new LoadInst(LoadPtr, LI->getName()+".pre", false,
1708 LI->getAlignment(),
1709 UnavailablePred->getTerminator());
1710
1711 // Add the newly created load.
1712 ValuesPerBlock.push_back(AvailableValueInBlock::get(UnavailablePred,
1713 NewLoad));
Bob Wilson923261b2010-02-23 05:55:00 +00001714 MD->invalidateCachedPointerInfo(LoadPtr);
1715 DEBUG(dbgs() << "GVN INSERTED " << *NewLoad << '\n');
Bob Wilsond517b522010-02-01 21:17:14 +00001716 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001717
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001718 // Perform PHI construction.
Chris Lattnerbf200182009-12-21 23:15:48 +00001719 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD, *DT,
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001720 VN.getAliasAnalysis());
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001721 LI->replaceAllUsesWith(V);
1722 if (isa<PHINode>(V))
1723 V->takeName(LI);
Duncan Sands19d0b472010-02-16 11:11:14 +00001724 if (V->getType()->isPointerTy())
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001725 MD->invalidateCachedPointerInfo(V);
Bob Wilson1da90412010-02-22 21:39:41 +00001726 VN.erase(LI);
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001727 toErase.push_back(LI);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001728 ++NumPRELoad;
Owen Anderson5e5599b2007-07-25 19:57:03 +00001729 return true;
1730}
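
// Illustrative note (not part of the pass): the load PRE shape handled above,
// with hypothetical names. Given
//   %a:     store i32 %v, i32* %P       ; value available here
//           br label %merge
//   %b:     br label %merge             ; value not available here
//   %merge: %x = load i32* %P
// exactly one predecessor (%b) lacks the value, so a new load %x.pre of %P is
// inserted at the end of %b and the original load collapses to
//   phi i32 [ %v, %a ], [ %x.pre, %b ]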
1731
Owen Anderson221a4362007-08-16 22:02:55 +00001732/// processLoad - Attempt to eliminate a load, first by eliminating it
1733/// locally, and then attempting non-local elimination if that fails.
Chris Lattner0e3d6332008-12-05 21:04:20 +00001734bool GVN::processLoad(LoadInst *L, SmallVectorImpl<Instruction*> &toErase) {
Dan Gohman81132462009-11-14 02:27:51 +00001735 if (!MD)
1736 return false;
1737
Chris Lattner0e3d6332008-12-05 21:04:20 +00001738 if (L->isVolatile())
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001739 return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001740
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001741 // Query memdep for the instruction this load locally depends on.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001742 MemDepResult Dep = MD->getDependency(L);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001743
Chris Lattner0e3d6332008-12-05 21:04:20 +00001744 // If the value isn't available, don't do anything!
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001745 if (Dep.isClobber()) {
Chris Lattner0a9616d2009-09-21 05:57:11 +00001746 // Check to see if we have something like this:
Chris Lattner1dd48c32009-09-20 19:03:47 +00001747 // store i32 123, i32* %P
1748 // %A = bitcast i32* %P to i8*
1749 // %B = gep i8* %A, i32 1
1750 // %C = load i8* %B
1751 //
1752 // We could do that by recognizing if the clobber instructions are obviously
1753 // a common base + constant offset, and if the previous store (or memset)
1754 // completely covers this load. This sort of thing can happen in bitfield
1755 // access code.
Chris Lattner42376062009-12-06 01:57:02 +00001756 Value *AvailVal = 0;
Chris Lattner0a9616d2009-09-21 05:57:11 +00001757 if (StoreInst *DepSI = dyn_cast<StoreInst>(Dep.getInst()))
Chris Lattner9d7fb292009-09-21 06:22:46 +00001758 if (const TargetData *TD = getAnalysisIfAvailable<TargetData>()) {
Chris Lattner07df9ef2009-12-09 07:37:07 +00001759 int Offset = AnalyzeLoadFromClobberingStore(L->getType(),
1760 L->getPointerOperand(),
1761 DepSI, *TD);
Chris Lattner42376062009-12-06 01:57:02 +00001762 if (Offset != -1)
1763 AvailVal = GetStoreValueForLoad(DepSI->getOperand(0), Offset,
1764 L->getType(), L, *TD);
Chris Lattner9d7fb292009-09-21 06:22:46 +00001765 }
Chris Lattner0a9616d2009-09-21 05:57:11 +00001766
Chris Lattner42376062009-12-06 01:57:02 +00001767 // If the clobbering value is a memset/memcpy/memmove, see if we can forward
1768 // a value on from it.
1769 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(Dep.getInst())) {
1770 if (const TargetData *TD = getAnalysisIfAvailable<TargetData>()) {
Chris Lattner07df9ef2009-12-09 07:37:07 +00001771 int Offset = AnalyzeLoadFromClobberingMemInst(L->getType(),
1772 L->getPointerOperand(),
1773 DepMI, *TD);
Chris Lattner42376062009-12-06 01:57:02 +00001774 if (Offset != -1)
1775 AvailVal = GetMemInstValueForLoad(DepMI, Offset, L->getType(), L,*TD);
1776 }
1777 }
1778
1779 if (AvailVal) {
David Greene2e6efc42010-01-05 01:27:17 +00001780 DEBUG(dbgs() << "GVN COERCED INST:\n" << *Dep.getInst() << '\n'
Chris Lattner42376062009-12-06 01:57:02 +00001781 << *AvailVal << '\n' << *L << "\n\n\n");
1782
1783 // Replace the load!
1784 L->replaceAllUsesWith(AvailVal);
Duncan Sands19d0b472010-02-16 11:11:14 +00001785 if (AvailVal->getType()->isPointerTy())
Chris Lattner42376062009-12-06 01:57:02 +00001786 MD->invalidateCachedPointerInfo(AvailVal);
Bob Wilson1da90412010-02-22 21:39:41 +00001787 VN.erase(L);
Chris Lattner42376062009-12-06 01:57:02 +00001788 toErase.push_back(L);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001789 ++NumGVNLoad;
Chris Lattner42376062009-12-06 01:57:02 +00001790 return true;
1791 }
1792
Torok Edwin72070282009-05-29 09:46:03 +00001793 DEBUG(
1794 // fast print dep, using operator<< on instruction would be too slow
David Greene2e6efc42010-01-05 01:27:17 +00001795 dbgs() << "GVN: load ";
1796 WriteAsOperand(dbgs(), L);
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001797 Instruction *I = Dep.getInst();
David Greene2e6efc42010-01-05 01:27:17 +00001798 dbgs() << " is clobbered by " << *I << '\n';
Torok Edwin72070282009-05-29 09:46:03 +00001799 );
Chris Lattner0e3d6332008-12-05 21:04:20 +00001800 return false;
Torok Edwin72070282009-05-29 09:46:03 +00001801 }
Chris Lattner0e3d6332008-12-05 21:04:20 +00001802
1803 // If it is defined in another block, try harder.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001804 if (Dep.isNonLocal())
Chris Lattner0e3d6332008-12-05 21:04:20 +00001805 return processNonLocalLoad(L, toErase);
Eli Friedman716c10c2008-02-12 12:08:14 +00001806
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001807 Instruction *DepInst = Dep.getInst();
Chris Lattner0e3d6332008-12-05 21:04:20 +00001808 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInst)) {
Chris Lattner1dd48c32009-09-20 19:03:47 +00001809 Value *StoredVal = DepSI->getOperand(0);
1810
1811 // The store and load are to a must-aliased pointer, but they may not
1812 // actually have the same type. See if we know how to reuse the stored
1813 // value (depending on its type).
1814 const TargetData *TD = 0;
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001815 if (StoredVal->getType() != L->getType()) {
1816 if ((TD = getAnalysisIfAvailable<TargetData>())) {
1817 StoredVal = CoerceAvailableValueToLoadType(StoredVal, L->getType(),
1818 L, *TD);
1819 if (StoredVal == 0)
1820 return false;
1821
David Greene2e6efc42010-01-05 01:27:17 +00001822 DEBUG(dbgs() << "GVN COERCED STORE:\n" << *DepSI << '\n' << *StoredVal
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001823 << '\n' << *L << "\n\n\n");
1824 }
1825 else
Chris Lattner1dd48c32009-09-20 19:03:47 +00001826 return false;
Chris Lattner1dd48c32009-09-20 19:03:47 +00001827 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001828
Chris Lattner0e3d6332008-12-05 21:04:20 +00001829 // Remove it!
Chris Lattner1dd48c32009-09-20 19:03:47 +00001830 L->replaceAllUsesWith(StoredVal);
Duncan Sands19d0b472010-02-16 11:11:14 +00001831 if (StoredVal->getType()->isPointerTy())
Chris Lattner1dd48c32009-09-20 19:03:47 +00001832 MD->invalidateCachedPointerInfo(StoredVal);
Bob Wilson1da90412010-02-22 21:39:41 +00001833 VN.erase(L);
Chris Lattner0e3d6332008-12-05 21:04:20 +00001834 toErase.push_back(L);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001835 ++NumGVNLoad;
Chris Lattner0e3d6332008-12-05 21:04:20 +00001836 return true;
1837 }
1838
1839 if (LoadInst *DepLI = dyn_cast<LoadInst>(DepInst)) {
Chris Lattner1dd48c32009-09-20 19:03:47 +00001840 Value *AvailableVal = DepLI;
1841
1842 // The loads are of a must-aliased pointer, but they may not actually have
1843 // the same type. See if we know how to reuse the previously loaded value
1844 // (depending on its type).
1845 const TargetData *TD = 0;
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001846 if (DepLI->getType() != L->getType()) {
1847 if ((TD = getAnalysisIfAvailable<TargetData>())) {
1848 AvailableVal = CoerceAvailableValueToLoadType(DepLI, L->getType(), L,*TD);
1849 if (AvailableVal == 0)
1850 return false;
Chris Lattner1dd48c32009-09-20 19:03:47 +00001851
David Greene2e6efc42010-01-05 01:27:17 +00001852 DEBUG(dbgs() << "GVN COERCED LOAD:\n" << *DepLI << "\n" << *AvailableVal
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001853 << "\n" << *L << "\n\n\n");
1854 }
1855 else
1856 return false;
Chris Lattner1dd48c32009-09-20 19:03:47 +00001857 }
1858
Chris Lattner0e3d6332008-12-05 21:04:20 +00001859 // Remove it!
Chris Lattner1dd48c32009-09-20 19:03:47 +00001860 L->replaceAllUsesWith(AvailableVal);
Duncan Sands19d0b472010-02-16 11:11:14 +00001861 if (DepLI->getType()->isPointerTy())
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00001862 MD->invalidateCachedPointerInfo(DepLI);
Bob Wilson1da90412010-02-22 21:39:41 +00001863 VN.erase(L);
Chris Lattner0e3d6332008-12-05 21:04:20 +00001864 toErase.push_back(L);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001865 ++NumGVNLoad;
Chris Lattner0e3d6332008-12-05 21:04:20 +00001866 return true;
1867 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001868
Chris Lattner3ff6d012008-11-30 01:39:32 +00001869 // If this load really doesn't depend on anything, then we must be loading an
1870 // undef value. This can happen when loading for a fresh allocation with no
1871 // intervening stores, for example.
Victor Hernandez8acf2952009-10-23 21:09:37 +00001872 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst)) {
Owen Andersonb292b8c2009-07-30 23:03:37 +00001873 L->replaceAllUsesWith(UndefValue::get(L->getType()));
Bob Wilson1da90412010-02-22 21:39:41 +00001874 VN.erase(L);
Chris Lattner3ff6d012008-11-30 01:39:32 +00001875 toErase.push_back(L);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001876 ++NumGVNLoad;
Chris Lattner0e3d6332008-12-05 21:04:20 +00001877 return true;
Eli Friedman716c10c2008-02-12 12:08:14 +00001878 }
Owen Anderson2b2bd282009-10-28 07:05:35 +00001879
Owen Andersonb9878ee2009-12-02 07:35:19 +00001880 // If this load occurs right after a lifetime begin,
Owen Anderson2b2bd282009-10-28 07:05:35 +00001881 // then the loaded value is undefined.
1882 if (IntrinsicInst* II = dyn_cast<IntrinsicInst>(DepInst)) {
Owen Andersonb9878ee2009-12-02 07:35:19 +00001883 if (II->getIntrinsicID() == Intrinsic::lifetime_start) {
Owen Anderson2b2bd282009-10-28 07:05:35 +00001884 L->replaceAllUsesWith(UndefValue::get(L->getType()));
Bob Wilson1da90412010-02-22 21:39:41 +00001885 VN.erase(L);
Owen Anderson2b2bd282009-10-28 07:05:35 +00001886 toErase.push_back(L);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00001887 ++NumGVNLoad;
Owen Anderson2b2bd282009-10-28 07:05:35 +00001888 return true;
1889 }
1890 }
Eli Friedman716c10c2008-02-12 12:08:14 +00001891
Chris Lattner0e3d6332008-12-05 21:04:20 +00001892 return false;
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001893}
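
// Illustrative note (not part of the pass): the simplest local case handled
// above is plain store-to-load forwarding, e.g.
//   store i32 123, i32* %P
//   %v = load i32* %P          ; every use of %v becomes 123
// and when the store and load are to the same address but have different
// types (say a float stored and a same-sized i32 loaded), the stored value
// is coerced to the load's type so it can be reused instead of reloaded.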
1894
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001895Value *GVN::lookupNumber(BasicBlock *BB, uint32_t num) {
Owen Anderson54e02192008-06-23 17:49:45 +00001896 DenseMap<BasicBlock*, ValueNumberScope*>::iterator I = localAvail.find(BB);
1897 if (I == localAvail.end())
1898 return 0;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001899
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001900 ValueNumberScope *Locals = I->second;
1901 while (Locals) {
1902 DenseMap<uint32_t, Value*>::iterator I = Locals->table.find(num);
1903 if (I != Locals->table.end())
Owen Anderson1b3ea962008-06-20 01:15:47 +00001904 return I->second;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001905 Locals = Locals->parent;
Owen Anderson1b3ea962008-06-20 01:15:47 +00001906 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001907
Owen Anderson1b3ea962008-06-20 01:15:47 +00001908 return 0;
1909}
1910
Owen Andersonbfe133e2008-12-15 02:03:00 +00001911
Owen Anderson398602a2007-08-14 18:16:29 +00001912/// processInstruction - When calculating availability, handle an instruction
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001913/// by inserting it into the appropriate sets.
Owen Andersonaccdca12008-06-12 19:25:32 +00001914bool GVN::processInstruction(Instruction *I,
Chris Lattner804209d2008-03-21 22:01:16 +00001915 SmallVectorImpl<Instruction*> &toErase) {
Devang Patel03936a12010-02-11 00:20:49 +00001916 // Ignore dbg info intrinsics.
1917 if (isa<DbgInfoIntrinsic>(I))
1918 return false;
1919
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001920 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
1921 bool Changed = processLoad(LI, toErase);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001922
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001923 if (!Changed) {
1924 unsigned Num = VN.lookup_or_add(LI);
1925 localAvail[I->getParent()]->table.insert(std::make_pair(Num, LI));
Owen Anderson6a903bc2008-06-18 21:41:49 +00001926 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001927
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001928 return Changed;
Owen Anderson6a903bc2008-06-18 21:41:49 +00001929 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001930
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001931 uint32_t NextNum = VN.getNextUnusedValueNumber();
1932 unsigned Num = VN.lookup_or_add(I);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001933
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001934 if (BranchInst *BI = dyn_cast<BranchInst>(I)) {
1935 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001936
Owen Anderson98f912b2009-04-01 23:53:49 +00001937 if (!BI->isConditional() || isa<Constant>(BI->getCondition()))
1938 return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001939
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001940 Value *BranchCond = BI->getCondition();
1941 uint32_t CondVN = VN.lookup_or_add(BranchCond);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001942
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001943 BasicBlock *TrueSucc = BI->getSuccessor(0);
1944 BasicBlock *FalseSucc = BI->getSuccessor(1);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001945
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001946 if (TrueSucc->getSinglePredecessor())
1947 localAvail[TrueSucc]->table[CondVN] =
1948 ConstantInt::getTrue(TrueSucc->getContext());
1949 if (FalseSucc->getSinglePredecessor())
1950 localAvail[FalseSucc]->table[CondVN] =
1951 ConstantInt::getFalse(TrueSucc->getContext());
Owen Anderson98f912b2009-04-01 23:53:49 +00001952
1953 return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001954
Owen Anderson0c1e6342008-04-07 09:59:07 +00001955 // Allocations are always uniquely numbered, so we can save time and memory
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001956 // by fast failing them.
Victor Hernandez8acf2952009-10-23 21:09:37 +00001957 } else if (isa<AllocaInst>(I) || isa<TerminatorInst>(I)) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001958 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Anderson0c1e6342008-04-07 09:59:07 +00001959 return false;
Owen Anderson6a903bc2008-06-18 21:41:49 +00001960 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001961
Owen Anderson221a4362007-08-16 22:02:55 +00001962 // Collapse PHI nodes
Owen Andersonbc271a02007-08-14 18:33:27 +00001963 if (PHINode* p = dyn_cast<PHINode>(I)) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001964 Value *constVal = CollapsePhi(p);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001965
Owen Andersonbc271a02007-08-14 18:33:27 +00001966 if (constVal) {
Owen Andersonf5023a72007-08-16 22:51:56 +00001967 p->replaceAllUsesWith(constVal);
Duncan Sands19d0b472010-02-16 11:11:14 +00001968 if (MD && constVal->getType()->isPointerTy())
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00001969 MD->invalidateCachedPointerInfo(constVal);
Owen Anderson164274e2008-12-23 00:49:51 +00001970 VN.erase(p);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001971
Owen Andersonf5023a72007-08-16 22:51:56 +00001972 toErase.push_back(p);
Owen Anderson6a903bc2008-06-18 21:41:49 +00001973 } else {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001974 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Andersonbc271a02007-08-14 18:33:27 +00001975 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001976
Owen Anderson3ea90a72008-07-03 17:44:33 +00001977 // If the number we were assigned was a brand new VN, then we don't
1978 // need to do a lookup to see if the number already exists
1979 // somewhere in the domtree: it can't!
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001980 } else if (Num == NextNum) {
1981 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001982
Owen Andersonbfe133e2008-12-15 02:03:00 +00001983 // Perform fast-path value-number based elimination of values inherited from
1984 // dominators.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001985 } else if (Value *repl = lookupNumber(I->getParent(), Num)) {
Owen Anderson086b2c42007-12-08 01:37:09 +00001986 // Remove it!
Owen Anderson10ffa862007-07-31 23:27:13 +00001987 VN.erase(I);
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001988 I->replaceAllUsesWith(repl);
Duncan Sands19d0b472010-02-16 11:11:14 +00001989 if (MD && repl->getType()->isPointerTy())
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00001990 MD->invalidateCachedPointerInfo(repl);
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001991 toErase.push_back(I);
1992 return true;
Owen Andersonbfe133e2008-12-15 02:03:00 +00001993
Owen Anderson3ea90a72008-07-03 17:44:33 +00001994 } else {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001995 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001996 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001997
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001998 return false;
1999}
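
// Illustrative note (not part of the pass): the branch handling above is a
// simple form of condition propagation. For
//   %c = icmp eq i32 %x, 0
//   br i1 %c, label %t, label %f
// the value number of %c is bound to true on entry to %t and to false on
// entry to %f (only when those blocks have a single predecessor), so a later
// instruction with the same value number inside %t or %f is replaced by the
// constant.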
2000
Bill Wendling456e8852008-12-22 22:32:22 +00002001/// runOnFunction - This is the main transformation entry point for a function.
Owen Anderson676070d2007-08-14 18:04:11 +00002002bool GVN::runOnFunction(Function& F) {
Dan Gohman81132462009-11-14 02:27:51 +00002003 if (!NoLoads)
2004 MD = &getAnalysis<MemoryDependenceAnalysis>();
Chris Lattner8541ede2008-12-01 00:40:32 +00002005 DT = &getAnalysis<DominatorTree>();
Owen Andersonf7928602008-05-12 20:15:55 +00002006 VN.setAliasAnalysis(&getAnalysis<AliasAnalysis>());
Chris Lattner8541ede2008-12-01 00:40:32 +00002007 VN.setMemDep(MD);
2008 VN.setDomTree(DT);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002009
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002010 bool Changed = false;
2011 bool ShouldContinue = true;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002012
Owen Andersonac310962008-07-16 17:52:31 +00002013 // Merge unconditional branches, allowing PRE to catch more
2014 // optimization opportunities.
2015 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002016 BasicBlock *BB = FI;
Owen Andersonac310962008-07-16 17:52:31 +00002017 ++FI;
Owen Andersonc0623812008-07-17 00:01:40 +00002018 bool removedBlock = MergeBlockIntoPredecessor(BB, this);
Dan Gohmand2d1ae12010-06-22 15:08:57 +00002019 if (removedBlock) ++NumGVNBlocks;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002020
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002021 Changed |= removedBlock;
Owen Andersonac310962008-07-16 17:52:31 +00002022 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002023
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002024 unsigned Iteration = 0;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002025
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002026 while (ShouldContinue) {
David Greene2e6efc42010-01-05 01:27:17 +00002027 DEBUG(dbgs() << "GVN iteration: " << Iteration << "\n");
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002028 ShouldContinue = iterateOnFunction(F);
Bob Wilson92cdb6e2010-02-16 19:51:59 +00002029 if (splitCriticalEdges())
2030 ShouldContinue = true;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002031 Changed |= ShouldContinue;
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002032 ++Iteration;
Owen Anderson676070d2007-08-14 18:04:11 +00002033 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002034
Owen Anderson04a6e0b2008-07-18 18:03:38 +00002035 if (EnablePRE) {
Owen Anderson2fbfb702008-09-03 23:06:07 +00002036 bool PREChanged = true;
2037 while (PREChanged) {
2038 PREChanged = performPRE(F);
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002039 Changed |= PREChanged;
Owen Anderson2fbfb702008-09-03 23:06:07 +00002040 }
Owen Anderson04a6e0b2008-07-18 18:03:38 +00002041 }
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002042 // FIXME: Should perform GVN again after PRE does something. PRE can move
2043 // computations into blocks where they become fully redundant. Note that
2044 // we can't do this until PRE's critical edge splitting updates memdep.
2045 // Actually, when this happens, we should just fully integrate PRE into GVN.
Nuno Lopese3127f32008-10-10 16:25:50 +00002046
2047 cleanupGlobalSets();
2048
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002049 return Changed;
Owen Anderson676070d2007-08-14 18:04:11 +00002050}
2051
2052
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002053bool GVN::processBlock(BasicBlock *BB) {
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002054 // FIXME: Kill off toErase by doing erasing eagerly in a helper function (and
2055 // incrementing BI before processing an instruction).
Owen Andersonaccdca12008-06-12 19:25:32 +00002056 SmallVector<Instruction*, 8> toErase;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002057 bool ChangedFunction = false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002058
Owen Andersonaccdca12008-06-12 19:25:32 +00002059 for (BasicBlock::iterator BI = BB->begin(), BE = BB->end();
2060 BI != BE;) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002061 ChangedFunction |= processInstruction(BI, toErase);
Owen Andersonaccdca12008-06-12 19:25:32 +00002062 if (toErase.empty()) {
2063 ++BI;
2064 continue;
2065 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002066
Owen Andersonaccdca12008-06-12 19:25:32 +00002067 // If we need some instructions deleted, do it now.
2068 NumGVNInstr += toErase.size();
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002069
Owen Andersonaccdca12008-06-12 19:25:32 +00002070 // Avoid iterator invalidation.
2071 bool AtStart = BI == BB->begin();
2072 if (!AtStart)
2073 --BI;
2074
2075 for (SmallVector<Instruction*, 4>::iterator I = toErase.begin(),
Chris Lattner8541ede2008-12-01 00:40:32 +00002076 E = toErase.end(); I != E; ++I) {
David Greene2e6efc42010-01-05 01:27:17 +00002077 DEBUG(dbgs() << "GVN removed: " << **I << '\n');
Dan Gohman81132462009-11-14 02:27:51 +00002078 if (MD) MD->removeInstruction(*I);
Owen Andersonaccdca12008-06-12 19:25:32 +00002079 (*I)->eraseFromParent();
Bill Wendlingebb6a542008-12-22 21:57:30 +00002080 DEBUG(verifyRemoved(*I));
Chris Lattner8541ede2008-12-01 00:40:32 +00002081 }
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002082 toErase.clear();
Owen Andersonaccdca12008-06-12 19:25:32 +00002083
2084 if (AtStart)
2085 BI = BB->begin();
2086 else
2087 ++BI;
Owen Andersonaccdca12008-06-12 19:25:32 +00002088 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002089
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002090 return ChangedFunction;
Owen Andersonaccdca12008-06-12 19:25:32 +00002091}
2092
Owen Anderson6a903bc2008-06-18 21:41:49 +00002093/// performPRE - Perform a purely local form of PRE that looks for diamond
2094/// control flow patterns and attempts to perform simple PRE at the join point.
bool GVN::performPRE(Function &F) {
  bool Changed = false;
  DenseMap<BasicBlock*, Value*> predMap;
  for (df_iterator<BasicBlock*> DI = df_begin(&F.getEntryBlock()),
       DE = df_end(&F.getEntryBlock()); DI != DE; ++DI) {
    BasicBlock *CurrentBlock = *DI;

    // Nothing to PRE in the entry block.
    if (CurrentBlock == &F.getEntryBlock()) continue;

    for (BasicBlock::iterator BI = CurrentBlock->begin(),
         BE = CurrentBlock->end(); BI != BE; ) {
      Instruction *CurInst = BI++;

      if (isa<AllocaInst>(CurInst) ||
          isa<TerminatorInst>(CurInst) || isa<PHINode>(CurInst) ||
          CurInst->getType()->isVoidTy() ||
          CurInst->mayReadFromMemory() || CurInst->mayHaveSideEffects() ||
          isa<DbgInfoIntrinsic>(CurInst))
        continue;

      uint32_t ValNo = VN.lookup(CurInst);

      // Look through the predecessors for PRE opportunities. We're
      // only trying to solve the basic diamond case, where
      // a value is computed in the successor and one predecessor,
      // but not the other. We also explicitly disallow cases
      // where the successor is its own predecessor, because they're
      // more complicated to get right.
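      // NumWith counts predecessors in which the value is already available;
      // NumWithout counts those in which it is not. Writing 2 into NumWithout
      // below acts as a sentinel that disqualifies the block, because the
      // check after this loop requires exactly one predecessor to lack the
      // value.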
      unsigned NumWith = 0;
      unsigned NumWithout = 0;
      BasicBlock *PREPred = 0;
      predMap.clear();

      for (pred_iterator PI = pred_begin(CurrentBlock),
           PE = pred_end(CurrentBlock); PI != PE; ++PI) {
        // We're not interested in PRE where the block is its
        // own predecessor, or in blocks with predecessors
        // that are not reachable.
        if (*PI == CurrentBlock) {
          NumWithout = 2;
          break;
        } else if (!localAvail.count(*PI)) {
          NumWithout = 2;
          break;
        }

        DenseMap<uint32_t, Value*>::iterator predV =
                                           localAvail[*PI]->table.find(ValNo);
        if (predV == localAvail[*PI]->table.end()) {
          PREPred = *PI;
          ++NumWithout;
        } else if (predV->second == CurInst) {
          NumWithout = 2;
        } else {
          predMap[*PI] = predV->second;
          ++NumWith;
        }
      }

      // Don't do PRE when it might increase code size, i.e. when
      // we would need to insert instructions in more than one pred.
      if (NumWithout != 1 || NumWith == 0)
        continue;

      // Don't do PRE across indirect branch.
      if (isa<IndirectBrInst>(PREPred->getTerminator()))
        continue;

      // We can't do PRE safely on a critical edge, so instead we schedule
      // the edge to be split and perform the PRE the next time we iterate
      // on the function.
      unsigned SuccNum = GetSuccessorNumber(PREPred, CurrentBlock);
      if (isCriticalEdge(PREPred->getTerminator(), SuccNum)) {
        toSplit.push_back(std::make_pair(PREPred->getTerminator(), SuccNum));
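        // The recorded edge is split by splitCriticalEdges(), which runs after
        // this walk over the function completes (see the call at the bottom of
        // performPRE).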
        continue;
      }

      // Instantiate the expression in the predecessor that lacked it.
      // Because we are going top-down through the block, all value numbers
      // will be available in the predecessor by the time we need them. Any
      // that weren't originally present will have been instantiated earlier
      // in this loop.
      Instruction *PREInstr = CurInst->clone();
      bool success = true;
      for (unsigned i = 0, e = CurInst->getNumOperands(); i != e; ++i) {
        Value *Op = PREInstr->getOperand(i);
        if (isa<Argument>(Op) || isa<Constant>(Op) || isa<GlobalValue>(Op))
          continue;

        if (Value *V = lookupNumber(PREPred, VN.lookup(Op))) {
          PREInstr->setOperand(i, V);
        } else {
          success = false;
          break;
        }
      }

      // Fail out if we encounter an operand that is not available in
      // the PRE predecessor. This is typically because of loads which
      // are not value numbered precisely.
      if (!success) {
        delete PREInstr;
        DEBUG(verifyRemoved(PREInstr));
        continue;
      }

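      // Every operand of the clone now refers to a value that is available in
      // PREPred, so it can be materialized at the end of that predecessor,
      // just before its terminator.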
      PREInstr->insertBefore(PREPred->getTerminator());
      PREInstr->setName(CurInst->getName() + ".pre");
      predMap[PREPred] = PREInstr;
      VN.add(PREInstr, ValNo);
      ++NumGVNPRE;

      // Update the availability map to include the new instruction.
      localAvail[PREPred]->table.insert(std::make_pair(ValNo, PREInstr));

      // Create a PHI to make the value available in this block.
      PHINode* Phi = PHINode::Create(CurInst->getType(),
                                     CurInst->getName() + ".pre-phi",
                                     CurrentBlock->begin());
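      // predMap holds the freshly inserted clone for PREPred and the values
      // that were already available in every other predecessor, so the PHI
      // receives an incoming value for each edge.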
      for (pred_iterator PI = pred_begin(CurrentBlock),
           PE = pred_end(CurrentBlock); PI != PE; ++PI)
        Phi->addIncoming(predMap[*PI], *PI);

      VN.add(Phi, ValNo);
      localAvail[CurrentBlock]->table[ValNo] = Phi;

      CurInst->replaceAllUsesWith(Phi);
      if (MD && Phi->getType()->isPointerTy())
        MD->invalidateCachedPointerInfo(Phi);
      VN.erase(CurInst);

      DEBUG(dbgs() << "GVN PRE removed: " << *CurInst << '\n');
      if (MD) MD->removeInstruction(CurInst);
      CurInst->eraseFromParent();
      DEBUG(verifyRemoved(CurInst));
      Changed = true;
    }
  }

  if (splitCriticalEdges())
    Changed = true;

  return Changed;
}

/// splitCriticalEdges - Split critical edges found during the previous
/// iteration that may enable further optimization.
bool GVN::splitCriticalEdges() {
  if (toSplit.empty())
    return false;
  do {
    std::pair<TerminatorInst*, unsigned> Edge = toSplit.pop_back_val();
    SplitCriticalEdge(Edge.first, Edge.second, this);
  } while (!toSplit.empty());
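  // Splitting inserts new blocks and rewires predecessor lists, so any
  // predecessor information memdep has cached is stale at this point.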
  if (MD) MD->invalidateCachedPredecessors();
  return true;
}

/// iterateOnFunction - Executes one iteration of GVN
bool GVN::iterateOnFunction(Function &F) {
  cleanupGlobalSets();

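  // Give every block a ValueNumberScope chained to the scope of its immediate
  // dominator, so that availability lookups can walk up the dominator tree.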
  for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
       DE = df_end(DT->getRootNode()); DI != DE; ++DI) {
    if (DI->getIDom())
      localAvail[DI->getBlock()] =
                   new ValueNumberScope(localAvail[DI->getIDom()->getBlock()]);
    else
      localAvail[DI->getBlock()] = new ValueNumberScope(0);
  }

  // Top-down walk of the dominator tree
  bool Changed = false;
#if 0
  // Needed for value numbering with phi construction to work.
  ReversePostOrderTraversal<Function*> RPOT(&F);
  for (ReversePostOrderTraversal<Function*>::rpo_iterator RI = RPOT.begin(),
       RE = RPOT.end(); RI != RE; ++RI)
    Changed |= processBlock(*RI);
#else
  for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
       DE = df_end(DT->getRootNode()); DI != DE; ++DI)
    Changed |= processBlock(DI->getBlock());
#endif

  return Changed;
}

void GVN::cleanupGlobalSets() {
  VN.clear();

  for (DenseMap<BasicBlock*, ValueNumberScope*>::iterator
       I = localAvail.begin(), E = localAvail.end(); I != E; ++I)
    delete I->second;
  localAvail.clear();
}

/// verifyRemoved - Verify that the specified instruction does not occur in our
/// internal data structures.
void GVN::verifyRemoved(const Instruction *Inst) const {
  VN.verifyRemoved(Inst);

  // Walk through the value number scope to make sure the instruction isn't
  // ferreted away in it.
  for (DenseMap<BasicBlock*, ValueNumberScope*>::const_iterator
       I = localAvail.begin(), E = localAvail.end(); I != E; ++I) {
    const ValueNumberScope *VNS = I->second;

    while (VNS) {
      for (DenseMap<uint32_t, Value*>::const_iterator
           II = VNS->table.begin(), IE = VNS->table.end(); II != IE; ++II) {
        assert(II->second != Inst && "Inst still in value numbering scope!");
      }

      VNS = VNS->parent;
    }
  }
}