//===- GVN.cpp - Eliminate redundant values and loads ---------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs global value numbering to eliminate fully redundant
// instructions. It also performs simple dead load elimination.
//
// Note that this pass does the value numbering itself; it does not use the
// ValueNumbering analysis passes.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "gvn"
#include "llvm/Transforms/Scalar.h"
#include "llvm/BasicBlock.h"
#include "llvm/Constants.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Function.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/LLVMContext.h"
#include "llvm/Operator.h"
#include "llvm/Value.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Analysis/PHITransAddr.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/GetElementPtrTypeIterator.h"
#include "llvm/Support/IRBuilder.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/SSAUpdater.h"
#include <cstdio>
using namespace llvm;

STATISTIC(NumGVNInstr,  "Number of instructions deleted");
STATISTIC(NumGVNLoad,   "Number of loads deleted");
STATISTIC(NumGVNPRE,    "Number of instructions PRE'd");
STATISTIC(NumGVNBlocks, "Number of blocks merged");
STATISTIC(NumPRELoad,   "Number of loads PRE'd");

static cl::opt<bool> EnablePRE("enable-pre",
                               cl::init(true), cl::Hidden);
static cl::opt<bool> EnableLoadPRE("enable-load-pre", cl::init(true));

//===----------------------------------------------------------------------===//
//                         ValueTable Class
//===----------------------------------------------------------------------===//

/// This class holds the mapping between values and value numbers. It is used
/// as an efficient mechanism to determine the expression-wise equivalence of
/// two values.
namespace {
  struct Expression {
    enum ExpressionOpcode { ADD, FADD, SUB, FSUB, MUL, FMUL,
                            UDIV, SDIV, FDIV, UREM, SREM,
                            FREM, SHL, LSHR, ASHR, AND, OR, XOR, ICMPEQ,
                            ICMPNE, ICMPUGT, ICMPUGE, ICMPULT, ICMPULE,
                            ICMPSGT, ICMPSGE, ICMPSLT, ICMPSLE, FCMPOEQ,
                            FCMPOGT, FCMPOGE, FCMPOLT, FCMPOLE, FCMPONE,
                            FCMPORD, FCMPUNO, FCMPUEQ, FCMPUGT, FCMPUGE,
                            FCMPULT, FCMPULE, FCMPUNE, EXTRACT, INSERT,
                            SHUFFLE, SELECT, TRUNC, ZEXT, SEXT, FPTOUI,
                            FPTOSI, UITOFP, SITOFP, FPTRUNC, FPEXT,
                            PTRTOINT, INTTOPTR, BITCAST, GEP, CALL, CONSTANT,
                            INSERTVALUE, EXTRACTVALUE, EMPTY, TOMBSTONE };

    ExpressionOpcode opcode;
    const Type* type;
    SmallVector<uint32_t, 4> varargs;
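    // function - For CALL expressions this is the callee; the
    // create_expression overloads for all other instruction kinds leave it
    // null.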
    Value *function;

    Expression() { }
    Expression(ExpressionOpcode o) : opcode(o) { }

    bool operator==(const Expression &other) const {
      if (opcode != other.opcode)
        return false;
      else if (opcode == EMPTY || opcode == TOMBSTONE)
        return true;
      else if (type != other.type)
        return false;
      else if (function != other.function)
        return false;
      else {
        if (varargs.size() != other.varargs.size())
          return false;

        for (size_t i = 0; i < varargs.size(); ++i)
          if (varargs[i] != other.varargs[i])
            return false;

        return true;
      }
    }

    bool operator!=(const Expression &other) const {
      return !(*this == other);
    }
  };

  class ValueTable {
    private:
      DenseMap<Value*, uint32_t> valueNumbering;
      DenseMap<Expression, uint32_t> expressionNumbering;
      AliasAnalysis* AA;
      MemoryDependenceAnalysis* MD;
      DominatorTree* DT;

      uint32_t nextValueNumber;

      Expression::ExpressionOpcode getOpcode(BinaryOperator* BO);
      Expression::ExpressionOpcode getOpcode(CmpInst* C);
      Expression::ExpressionOpcode getOpcode(CastInst* C);
      Expression create_expression(BinaryOperator* BO);
      Expression create_expression(CmpInst* C);
      Expression create_expression(ShuffleVectorInst* V);
      Expression create_expression(ExtractElementInst* C);
      Expression create_expression(InsertElementInst* V);
      Expression create_expression(SelectInst* V);
      Expression create_expression(CastInst* C);
      Expression create_expression(GetElementPtrInst* G);
      Expression create_expression(CallInst* C);
      Expression create_expression(Constant* C);
      Expression create_expression(ExtractValueInst* C);
      Expression create_expression(InsertValueInst* C);

      uint32_t lookup_or_add_call(CallInst* C);
    public:
      ValueTable() : nextValueNumber(1) { }
      uint32_t lookup_or_add(Value *V);
      uint32_t lookup(Value *V) const;
      void add(Value *V, uint32_t num);
      void clear();
      void erase(Value *v);
      unsigned size();
      void setAliasAnalysis(AliasAnalysis* A) { AA = A; }
      AliasAnalysis *getAliasAnalysis() const { return AA; }
      void setMemDep(MemoryDependenceAnalysis* M) { MD = M; }
      void setDomTree(DominatorTree* D) { DT = D; }
      uint32_t getNextUnusedValueNumber() { return nextValueNumber; }
      void verifyRemoved(const Value *) const;
  };
}

namespace llvm {
template <> struct DenseMapInfo<Expression> {
  static inline Expression getEmptyKey() {
    return Expression(Expression::EMPTY);
  }

  static inline Expression getTombstoneKey() {
    return Expression(Expression::TOMBSTONE);
  }

  static unsigned getHashValue(const Expression e) {
    unsigned hash = e.opcode;

    hash = ((unsigned)((uintptr_t)e.type >> 4) ^
            (unsigned)((uintptr_t)e.type >> 9));

    for (SmallVector<uint32_t, 4>::const_iterator I = e.varargs.begin(),
         E = e.varargs.end(); I != E; ++I)
      hash = *I + hash * 37;

    hash = ((unsigned)((uintptr_t)e.function >> 4) ^
            (unsigned)((uintptr_t)e.function >> 9)) +
           hash * 37;

    return hash;
  }
  static bool isEqual(const Expression &LHS, const Expression &RHS) {
    return LHS == RHS;
  }
  static bool isPod() { return true; }
};
}

//===----------------------------------------------------------------------===//
//                     ValueTable Internal Functions
//===----------------------------------------------------------------------===//
Expression::ExpressionOpcode ValueTable::getOpcode(BinaryOperator* BO) {
  switch(BO->getOpcode()) {
  default: // THIS SHOULD NEVER HAPPEN
    llvm_unreachable("Binary operator with unknown opcode?");
  case Instruction::Add:  return Expression::ADD;
  case Instruction::FAdd: return Expression::FADD;
  case Instruction::Sub:  return Expression::SUB;
  case Instruction::FSub: return Expression::FSUB;
  case Instruction::Mul:  return Expression::MUL;
  case Instruction::FMul: return Expression::FMUL;
  case Instruction::UDiv: return Expression::UDIV;
  case Instruction::SDiv: return Expression::SDIV;
  case Instruction::FDiv: return Expression::FDIV;
  case Instruction::URem: return Expression::UREM;
  case Instruction::SRem: return Expression::SREM;
  case Instruction::FRem: return Expression::FREM;
  case Instruction::Shl:  return Expression::SHL;
  case Instruction::LShr: return Expression::LSHR;
  case Instruction::AShr: return Expression::ASHR;
  case Instruction::And:  return Expression::AND;
  case Instruction::Or:   return Expression::OR;
  case Instruction::Xor:  return Expression::XOR;
  }
}

Expression::ExpressionOpcode ValueTable::getOpcode(CmpInst* C) {
  if (isa<ICmpInst>(C)) {
    switch (C->getPredicate()) {
    default:  // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case ICmpInst::ICMP_EQ:  return Expression::ICMPEQ;
    case ICmpInst::ICMP_NE:  return Expression::ICMPNE;
    case ICmpInst::ICMP_UGT: return Expression::ICMPUGT;
    case ICmpInst::ICMP_UGE: return Expression::ICMPUGE;
    case ICmpInst::ICMP_ULT: return Expression::ICMPULT;
    case ICmpInst::ICMP_ULE: return Expression::ICMPULE;
    case ICmpInst::ICMP_SGT: return Expression::ICMPSGT;
    case ICmpInst::ICMP_SGE: return Expression::ICMPSGE;
    case ICmpInst::ICMP_SLT: return Expression::ICMPSLT;
    case ICmpInst::ICMP_SLE: return Expression::ICMPSLE;
    }
  } else {
    switch (C->getPredicate()) {
    default: // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case FCmpInst::FCMP_OEQ: return Expression::FCMPOEQ;
    case FCmpInst::FCMP_OGT: return Expression::FCMPOGT;
    case FCmpInst::FCMP_OGE: return Expression::FCMPOGE;
    case FCmpInst::FCMP_OLT: return Expression::FCMPOLT;
    case FCmpInst::FCMP_OLE: return Expression::FCMPOLE;
    case FCmpInst::FCMP_ONE: return Expression::FCMPONE;
    case FCmpInst::FCMP_ORD: return Expression::FCMPORD;
    case FCmpInst::FCMP_UNO: return Expression::FCMPUNO;
    case FCmpInst::FCMP_UEQ: return Expression::FCMPUEQ;
    case FCmpInst::FCMP_UGT: return Expression::FCMPUGT;
    case FCmpInst::FCMP_UGE: return Expression::FCMPUGE;
    case FCmpInst::FCMP_ULT: return Expression::FCMPULT;
    case FCmpInst::FCMP_ULE: return Expression::FCMPULE;
    case FCmpInst::FCMP_UNE: return Expression::FCMPUNE;
    }
  }
}

Expression::ExpressionOpcode ValueTable::getOpcode(CastInst* C) {
  switch(C->getOpcode()) {
  default: // THIS SHOULD NEVER HAPPEN
    llvm_unreachable("Cast operator with unknown opcode?");
  case Instruction::Trunc:    return Expression::TRUNC;
  case Instruction::ZExt:     return Expression::ZEXT;
  case Instruction::SExt:     return Expression::SEXT;
  case Instruction::FPToUI:   return Expression::FPTOUI;
  case Instruction::FPToSI:   return Expression::FPTOSI;
  case Instruction::UIToFP:   return Expression::UITOFP;
  case Instruction::SIToFP:   return Expression::SITOFP;
  case Instruction::FPTrunc:  return Expression::FPTRUNC;
  case Instruction::FPExt:    return Expression::FPEXT;
  case Instruction::PtrToInt: return Expression::PTRTOINT;
  case Instruction::IntToPtr: return Expression::INTTOPTR;
  case Instruction::BitCast:  return Expression::BITCAST;
  }
}

Expression ValueTable::create_expression(CallInst* C) {
  Expression e;

  e.type = C->getType();
  e.function = C->getCalledFunction();
  e.opcode = Expression::CALL;

  for (CallInst::op_iterator I = C->op_begin()+1, E = C->op_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(BinaryOperator* BO) {
  Expression e;
  e.varargs.push_back(lookup_or_add(BO->getOperand(0)));
  e.varargs.push_back(lookup_or_add(BO->getOperand(1)));
  e.function = 0;
  e.type = BO->getType();
  e.opcode = getOpcode(BO);

  return e;
}

Expression ValueTable::create_expression(CmpInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.varargs.push_back(lookup_or_add(C->getOperand(1)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = getOpcode(C);

  return e;
}

Expression ValueTable::create_expression(CastInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = getOpcode(C);

  return e;
}

Expression ValueTable::create_expression(ShuffleVectorInst* S) {
  Expression e;

  e.varargs.push_back(lookup_or_add(S->getOperand(0)));
  e.varargs.push_back(lookup_or_add(S->getOperand(1)));
  e.varargs.push_back(lookup_or_add(S->getOperand(2)));
  e.function = 0;
  e.type = S->getType();
  e.opcode = Expression::SHUFFLE;

  return e;
}

Expression ValueTable::create_expression(ExtractElementInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getOperand(0)));
  e.varargs.push_back(lookup_or_add(E->getOperand(1)));
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACT;

  return e;
}

Expression ValueTable::create_expression(InsertElementInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getOperand(0)));
  e.varargs.push_back(lookup_or_add(I->getOperand(1)));
  e.varargs.push_back(lookup_or_add(I->getOperand(2)));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::INSERT;

  return e;
}

Expression ValueTable::create_expression(SelectInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getCondition()));
  e.varargs.push_back(lookup_or_add(I->getTrueValue()));
  e.varargs.push_back(lookup_or_add(I->getFalseValue()));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::SELECT;

  return e;
}

Expression ValueTable::create_expression(GetElementPtrInst* G) {
  Expression e;

  e.varargs.push_back(lookup_or_add(G->getPointerOperand()));
  e.function = 0;
  e.type = G->getType();
  e.opcode = Expression::GEP;

  for (GetElementPtrInst::op_iterator I = G->idx_begin(), E = G->idx_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(ExtractValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  for (ExtractValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACTVALUE;

  return e;
}

Expression ValueTable::create_expression(InsertValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  e.varargs.push_back(lookup_or_add(E->getInsertedValueOperand()));
  for (InsertValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::INSERTVALUE;

  return e;
}

//===----------------------------------------------------------------------===//
//                     ValueTable External Functions
//===----------------------------------------------------------------------===//

/// add - Insert a value into the table with a specified value number.
void ValueTable::add(Value *V, uint32_t num) {
  valueNumbering.insert(std::make_pair(V, num));
}

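/// lookup_or_add_call - Value-number a call.  Calls that do not access memory
/// are numbered purely by their expression.  Calls that only read memory are
/// given the number of an identical dominating call (found via memdep) whose
/// operands have the same value numbers; otherwise, and for all other calls,
/// a fresh value number is assigned.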
uint32_t ValueTable::lookup_or_add_call(CallInst* C) {
  if (AA->doesNotAccessMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) e = nextValueNumber++;
    valueNumbering[C] = e;
    return e;
  } else if (AA->onlyReadsMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }
    if (!MD) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }

    MemDepResult local_dep = MD->getDependency(C);

    if (!local_dep.isDef() && !local_dep.isNonLocal()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (local_dep.isDef()) {
      CallInst* local_cdep = cast<CallInst>(local_dep.getInst());

      if (local_cdep->getNumOperands() != C->getNumOperands()) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }

      for (unsigned i = 1; i < C->getNumOperands(); ++i) {
        uint32_t c_vn = lookup_or_add(C->getOperand(i));
        uint32_t cd_vn = lookup_or_add(local_cdep->getOperand(i));
        if (c_vn != cd_vn) {
          valueNumbering[C] = nextValueNumber;
          return nextValueNumber++;
        }
      }

      uint32_t v = lookup_or_add(local_cdep);
      valueNumbering[C] = v;
      return v;
    }

    // Non-local case.
    const MemoryDependenceAnalysis::NonLocalDepInfo &deps =
      MD->getNonLocalCallDependency(CallSite(C));
    // FIXME: call/call dependencies for readonly calls should return def, not
    // clobber!  Move the checking logic to MemDep!
    CallInst* cdep = 0;

    // Check to see if we have a single dominating call instruction that is
    // identical to C.
    for (unsigned i = 0, e = deps.size(); i != e; ++i) {
      const NonLocalDepEntry *I = &deps[i];
      // Ignore non-local dependencies.
      if (I->getResult().isNonLocal())
        continue;

      // We don't handle non-dependencies.  If we already have a call, reject
      // instruction dependencies.
      if (I->getResult().isClobber() || cdep != 0) {
        cdep = 0;
        break;
      }

      CallInst *NonLocalDepCall = dyn_cast<CallInst>(I->getResult().getInst());
      // FIXME: All duplicated with non-local case.
      if (NonLocalDepCall && DT->properlyDominates(I->getBB(), C->getParent())){
        cdep = NonLocalDepCall;
        continue;
      }

      cdep = 0;
      break;
    }

    if (!cdep) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (cdep->getNumOperands() != C->getNumOperands()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }
    for (unsigned i = 1; i < C->getNumOperands(); ++i) {
      uint32_t c_vn = lookup_or_add(C->getOperand(i));
      uint32_t cd_vn = lookup_or_add(cdep->getOperand(i));
      if (c_vn != cd_vn) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }
    }

    uint32_t v = lookup_or_add(cdep);
    valueNumbering[C] = v;
    return v;

  } else {
    valueNumbering[C] = nextValueNumber;
    return nextValueNumber++;
  }
}

/// lookup_or_add - Returns the value number for the specified value, assigning
/// it a new number if it did not have one before.
uint32_t ValueTable::lookup_or_add(Value *V) {
  DenseMap<Value*, uint32_t>::iterator VI = valueNumbering.find(V);
  if (VI != valueNumbering.end())
    return VI->second;

  if (!isa<Instruction>(V)) {
    valueNumbering[V] = nextValueNumber;
    return nextValueNumber++;
  }

  Instruction* I = cast<Instruction>(V);
  Expression exp;
  switch (I->getOpcode()) {
    case Instruction::Call:
      return lookup_or_add_call(cast<CallInst>(I));
    case Instruction::Add:
    case Instruction::FAdd:
    case Instruction::Sub:
    case Instruction::FSub:
    case Instruction::Mul:
    case Instruction::FMul:
    case Instruction::UDiv:
    case Instruction::SDiv:
    case Instruction::FDiv:
    case Instruction::URem:
    case Instruction::SRem:
    case Instruction::FRem:
    case Instruction::Shl:
    case Instruction::LShr:
    case Instruction::AShr:
    case Instruction::And:
    case Instruction::Or :
    case Instruction::Xor:
      exp = create_expression(cast<BinaryOperator>(I));
      break;
    case Instruction::ICmp:
    case Instruction::FCmp:
      exp = create_expression(cast<CmpInst>(I));
      break;
    case Instruction::Trunc:
    case Instruction::ZExt:
    case Instruction::SExt:
    case Instruction::FPToUI:
    case Instruction::FPToSI:
    case Instruction::UIToFP:
    case Instruction::SIToFP:
    case Instruction::FPTrunc:
    case Instruction::FPExt:
    case Instruction::PtrToInt:
    case Instruction::IntToPtr:
    case Instruction::BitCast:
      exp = create_expression(cast<CastInst>(I));
      break;
    case Instruction::Select:
      exp = create_expression(cast<SelectInst>(I));
      break;
    case Instruction::ExtractElement:
      exp = create_expression(cast<ExtractElementInst>(I));
      break;
    case Instruction::InsertElement:
      exp = create_expression(cast<InsertElementInst>(I));
      break;
    case Instruction::ShuffleVector:
      exp = create_expression(cast<ShuffleVectorInst>(I));
      break;
    case Instruction::ExtractValue:
      exp = create_expression(cast<ExtractValueInst>(I));
      break;
    case Instruction::InsertValue:
      exp = create_expression(cast<InsertValueInst>(I));
      break;
    case Instruction::GetElementPtr:
      exp = create_expression(cast<GetElementPtrInst>(I));
      break;
    default:
      valueNumbering[V] = nextValueNumber;
      return nextValueNumber++;
  }

  uint32_t& e = expressionNumbering[exp];
  if (!e) e = nextValueNumber++;
  valueNumbering[V] = e;
  return e;
}

/// lookup - Returns the value number of the specified value. Fails if
/// the value has not yet been numbered.
uint32_t ValueTable::lookup(Value *V) const {
  DenseMap<Value*, uint32_t>::const_iterator VI = valueNumbering.find(V);
  assert(VI != valueNumbering.end() && "Value not numbered?");
  return VI->second;
}

/// clear - Remove all entries from the ValueTable
void ValueTable::clear() {
  valueNumbering.clear();
  expressionNumbering.clear();
  nextValueNumber = 1;
}

/// erase - Remove a value from the value numbering
void ValueTable::erase(Value *V) {
  valueNumbering.erase(V);
}

/// verifyRemoved - Verify that the value is removed from all internal data
/// structures.
void ValueTable::verifyRemoved(const Value *V) const {
  for (DenseMap<Value*, uint32_t>::const_iterator
         I = valueNumbering.begin(), E = valueNumbering.end(); I != E; ++I) {
    assert(I->first != V && "Inst still occurs in value numbering map!");
  }
}

//===----------------------------------------------------------------------===//
//                                GVN Pass
//===----------------------------------------------------------------------===//

namespace {
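  /// ValueNumberScope - A per-basic-block table mapping value numbers to the
  /// value that is available under that number in the block.  The 'parent'
  /// pointer is intended to chain a block's scope to an enclosing
  /// (dominating) block's scope.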
  struct ValueNumberScope {
    ValueNumberScope* parent;
    DenseMap<uint32_t, Value*> table;

    ValueNumberScope(ValueNumberScope* p) : parent(p) { }
  };
}

namespace {

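  /// GVN - The global value numbering pass.  It numbers instructions with
  /// ValueTable, removes fully redundant instructions and loads, and
  /// optionally performs PRE and load PRE.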
  class GVN : public FunctionPass {
    bool runOnFunction(Function &F);
  public:
    static char ID; // Pass identification, replacement for typeid
    explicit GVN(bool nopre = false, bool noloads = false)
      : FunctionPass(&ID), NoPRE(nopre), NoLoads(noloads), MD(0) { }

  private:
    bool NoPRE;
    bool NoLoads;
    MemoryDependenceAnalysis *MD;
    DominatorTree *DT;

    ValueTable VN;
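    /// localAvail - For each basic block, the set of values available in it,
    /// indexed by value number.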
    DenseMap<BasicBlock*, ValueNumberScope*> localAvail;

    // This transformation requires dominator info.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<DominatorTree>();
      if (!NoLoads)
        AU.addRequired<MemoryDependenceAnalysis>();
      AU.addRequired<AliasAnalysis>();

      AU.addPreserved<DominatorTree>();
      AU.addPreserved<AliasAnalysis>();
    }

    // Helper functions
    // FIXME: eliminate or document these better
    bool processLoad(LoadInst* L,
                     SmallVectorImpl<Instruction*> &toErase);
    bool processInstruction(Instruction *I,
                            SmallVectorImpl<Instruction*> &toErase);
    bool processNonLocalLoad(LoadInst* L,
                             SmallVectorImpl<Instruction*> &toErase);
    bool processBlock(BasicBlock *BB);
    void dump(DenseMap<uint32_t, Value*>& d);
    bool iterateOnFunction(Function &F);
    Value *CollapsePhi(PHINode* p);
    bool performPRE(Function& F);
    Value *lookupNumber(BasicBlock *BB, uint32_t num);
    void cleanupGlobalSets();
    void verifyRemoved(const Instruction *I) const;
  };

  char GVN::ID = 0;
}

// createGVNPass - The public interface to this file...
FunctionPass *llvm::createGVNPass(bool NoPRE, bool NoLoads) {
  return new GVN(NoPRE, NoLoads);
}

static RegisterPass<GVN> X("gvn",
                           "Global Value Numbering");

void GVN::dump(DenseMap<uint32_t, Value*>& d) {
  printf("{\n");
  for (DenseMap<uint32_t, Value*>::iterator I = d.begin(),
       E = d.end(); I != E; ++I) {
    printf("%d\n", I->first);
    I->second->dump();
  }
  printf("}\n");
}

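/// isSafeReplacement - Return true if it is safe to replace the PHI node 'p'
/// with 'inst'.  Replacement by a non-PHI is always allowed; replacement by
/// another PHI is rejected if 'p' has a PHI use in the block containing
/// 'inst'.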
static bool isSafeReplacement(PHINode* p, Instruction *inst) {
  if (!isa<PHINode>(inst))
    return true;

  for (Instruction::use_iterator UI = p->use_begin(), E = p->use_end();
       UI != E; ++UI)
    if (PHINode* use_phi = dyn_cast<PHINode>(UI))
      if (use_phi->getParent() == inst->getParent())
        return false;

  return true;
}

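/// CollapsePhi - If PN can be collapsed to a single incoming value (per
/// hasConstantValue), return that value when it is a non-instruction or an
/// instruction that dominates PN and passes isSafeReplacement; otherwise
/// return null.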
Value *GVN::CollapsePhi(PHINode *PN) {
  Value *ConstVal = PN->hasConstantValue(DT);
  if (!ConstVal) return 0;

  Instruction *Inst = dyn_cast<Instruction>(ConstVal);
  if (!Inst)
    return ConstVal;

  if (DT->dominates(Inst, PN))
    if (isSafeReplacement(PN, Inst))
      return Inst;
  return 0;
}

/// IsValueFullyAvailableInBlock - Return true if we can prove that the value
/// we're analyzing is fully available in the specified block.  As we go, keep
/// track of which blocks we know are fully alive in FullyAvailableBlocks.  This
/// map is actually a tri-state map with the following values:
///   0) we know the block *is not* fully available.
///   1) we know the block *is* fully available.
///   2) we do not know whether the block is fully available or not, but we are
///      currently speculating that it will be.
///   3) we are speculating for this block and have used that to speculate for
///      other blocks.
static bool IsValueFullyAvailableInBlock(BasicBlock *BB,
                            DenseMap<BasicBlock*, char> &FullyAvailableBlocks) {
  // Optimistically assume that the block is fully available and check to see
  // if we already know about this block in one lookup.
  std::pair<DenseMap<BasicBlock*, char>::iterator, char> IV =
    FullyAvailableBlocks.insert(std::make_pair(BB, 2));

  // If the entry already existed for this block, return the precomputed value.
  if (!IV.second) {
    // If this is a speculative "available" value, mark it as being used for
    // speculation of other blocks.
    if (IV.first->second == 2)
      IV.first->second = 3;
    return IV.first->second != 0;
  }

  // Otherwise, see if it is fully available in all predecessors.
  pred_iterator PI = pred_begin(BB), PE = pred_end(BB);

  // If this block has no predecessors, it isn't live-in here.
  if (PI == PE)
    goto SpeculationFailure;

  for (; PI != PE; ++PI)
    // If the value isn't fully available in one of our predecessors, then it
    // isn't fully available in this block either.  Undo our previous
    // optimistic assumption and bail out.
    if (!IsValueFullyAvailableInBlock(*PI, FullyAvailableBlocks))
      goto SpeculationFailure;

  return true;

// SpeculationFailure - If we get here, we found out that this is not, after
// all, a fully-available block.  We have a problem if we speculated on this and
// used the speculation to mark other blocks as available.
SpeculationFailure:
  char &BBVal = FullyAvailableBlocks[BB];

  // If we didn't speculate on this, just return with it set to false.
  if (BBVal == 2) {
    BBVal = 0;
    return false;
  }

  // If we did speculate on this value, we could have blocks set to 1 that are
  // incorrect.  Walk the (transitive) successors of this block and mark them as
  // 0 if set to one.
  SmallVector<BasicBlock*, 32> BBWorklist;
  BBWorklist.push_back(BB);

  while (!BBWorklist.empty()) {
    BasicBlock *Entry = BBWorklist.pop_back_val();
    // Note that this sets blocks to 0 (unavailable) if they happen to not
    // already be in FullyAvailableBlocks.  This is safe.
    char &EntryVal = FullyAvailableBlocks[Entry];
    if (EntryVal == 0) continue;  // Already unavailable.

    // Mark as unavailable.
    EntryVal = 0;

    for (succ_iterator I = succ_begin(Entry), E = succ_end(Entry); I != E; ++I)
      BBWorklist.push_back(*I);
  }

  return false;
}


/// CanCoerceMustAliasedValueToLoad - Return true if
/// CoerceAvailableValueToLoadType will succeed.
static bool CanCoerceMustAliasedValueToLoad(Value *StoredVal,
                                            const Type *LoadTy,
                                            const TargetData &TD) {
  // If the loaded or stored value is a first class array or struct, don't try
  // to transform them.  We need to be able to bitcast to integer.
  if (isa<StructType>(LoadTy) || isa<ArrayType>(LoadTy) ||
      isa<StructType>(StoredVal->getType()) ||
      isa<ArrayType>(StoredVal->getType()))
    return false;

  // The store has to be at least as big as the load.
  if (TD.getTypeSizeInBits(StoredVal->getType()) <
        TD.getTypeSizeInBits(LoadTy))
    return false;

  return true;
}


/// CoerceAvailableValueToLoadType - If we saw a store of a value to memory, and
/// then a load from a must-aliased pointer of a different type, try to coerce
/// the stored value.  LoadedTy is the type of the load we want to replace and
/// InsertPt is the place to insert new instructions.
///
/// If we can't do it, return null.
static Value *CoerceAvailableValueToLoadType(Value *StoredVal,
                                             const Type *LoadedTy,
                                             Instruction *InsertPt,
                                             const TargetData &TD) {
  if (!CanCoerceMustAliasedValueToLoad(StoredVal, LoadedTy, TD))
    return 0;

  const Type *StoredValTy = StoredVal->getType();

  uint64_t StoreSize = TD.getTypeSizeInBits(StoredValTy);
  uint64_t LoadSize = TD.getTypeSizeInBits(LoadedTy);

  // If the store and reload are the same size, we can always reuse it.
  if (StoreSize == LoadSize) {
    if (isa<PointerType>(StoredValTy) && isa<PointerType>(LoadedTy)) {
      // Pointer to Pointer -> use bitcast.
      return new BitCastInst(StoredVal, LoadedTy, "", InsertPt);
    }

    // Convert source pointers to integers, which can be bitcast.
    if (isa<PointerType>(StoredValTy)) {
      StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
      StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
    }

    const Type *TypeToCastTo = LoadedTy;
    if (isa<PointerType>(TypeToCastTo))
      TypeToCastTo = TD.getIntPtrType(StoredValTy->getContext());

    if (StoredValTy != TypeToCastTo)
      StoredVal = new BitCastInst(StoredVal, TypeToCastTo, "", InsertPt);

    // Cast to pointer if the load needs a pointer type.
    if (isa<PointerType>(LoadedTy))
      StoredVal = new IntToPtrInst(StoredVal, LoadedTy, "", InsertPt);

    return StoredVal;
  }

  // If the loaded value is smaller than the available value, then we can
  // extract out a piece from it.  If the available value is too small, then we
  // can't do anything.
  assert(StoreSize >= LoadSize && "CanCoerceMustAliasedValueToLoad fail");

  // Convert source pointers to integers, which can be manipulated.
  if (isa<PointerType>(StoredValTy)) {
    StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
    StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
  }

  // Convert vectors and fp to integer, which can be manipulated.
  if (!isa<IntegerType>(StoredValTy)) {
    StoredValTy = IntegerType::get(StoredValTy->getContext(), StoreSize);
    StoredVal = new BitCastInst(StoredVal, StoredValTy, "", InsertPt);
  }

  // If this is a big-endian system, we need to shift the value down to the low
  // bits so that a truncate will work.
  if (TD.isBigEndian()) {
    Constant *Val = ConstantInt::get(StoredVal->getType(), StoreSize-LoadSize);
    StoredVal = BinaryOperator::CreateLShr(StoredVal, Val, "tmp", InsertPt);
  }

  // Truncate the integer to the right size now.
  const Type *NewIntTy = IntegerType::get(StoredValTy->getContext(), LoadSize);
  StoredVal = new TruncInst(StoredVal, NewIntTy, "trunc", InsertPt);

  if (LoadedTy == NewIntTy)
    return StoredVal;

  // If the result is a pointer, inttoptr.
  if (isa<PointerType>(LoadedTy))
    return new IntToPtrInst(StoredVal, LoadedTy, "inttoptr", InsertPt);

  // Otherwise, bitcast.
  return new BitCastInst(StoredVal, LoadedTy, "bitcast", InsertPt);
}

/// GetBaseWithConstantOffset - Analyze the specified pointer to see if it can
/// be expressed as a base pointer plus a constant offset.  Return the base and
/// offset to the caller.
static Value *GetBaseWithConstantOffset(Value *Ptr, int64_t &Offset,
                                        const TargetData &TD) {
  Operator *PtrOp = dyn_cast<Operator>(Ptr);
  if (PtrOp == 0) return Ptr;

  // Just look through bitcasts.
  if (PtrOp->getOpcode() == Instruction::BitCast)
    return GetBaseWithConstantOffset(PtrOp->getOperand(0), Offset, TD);

  // If this is a GEP with constant indices, we can look through it.
  GEPOperator *GEP = dyn_cast<GEPOperator>(PtrOp);
  if (GEP == 0 || !GEP->hasAllConstantIndices()) return Ptr;

  gep_type_iterator GTI = gep_type_begin(GEP);
  for (User::op_iterator I = GEP->idx_begin(), E = GEP->idx_end(); I != E;
       ++I, ++GTI) {
    ConstantInt *OpC = cast<ConstantInt>(*I);
    if (OpC->isZero()) continue;

    // Handle a struct and array indices which add their offset to the pointer.
    if (const StructType *STy = dyn_cast<StructType>(*GTI)) {
      Offset += TD.getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
    } else {
      uint64_t Size = TD.getTypeAllocSize(GTI.getIndexedType());
      Offset += OpC->getSExtValue()*Size;
    }
  }

  // Re-sign extend from the pointer size if needed to get overflow edge cases
  // right.
  unsigned PtrSize = TD.getPointerSizeInBits();
  if (PtrSize < 64)
    Offset = (Offset << (64-PtrSize)) >> (64-PtrSize);

  return GetBaseWithConstantOffset(GEP->getPointerOperand(), Offset, TD);
}


/// AnalyzeLoadFromClobberingWrite - This function is called when we have a
/// memdep query of a load that ends up being a clobbering memory write (store,
/// memset, memcpy, memmove).  This means that the write *may* provide bits used
/// by the load but we can't be sure because the pointers don't mustalias.
///
/// Check this case to see if there is anything more we can do before we give
/// up.  This returns -1 if we have to give up, or a byte number in the stored
/// value of the piece that feeds the load.
static int AnalyzeLoadFromClobberingWrite(const Type *LoadTy, Value *LoadPtr,
                                          Value *WritePtr,
                                          uint64_t WriteSizeInBits,
                                          const TargetData &TD) {
  // If the loaded or stored value is a first class array or struct, don't try
  // to transform them.  We need to be able to bitcast to integer.
  if (isa<StructType>(LoadTy) || isa<ArrayType>(LoadTy))
    return -1;

  int64_t StoreOffset = 0, LoadOffset = 0;
  Value *StoreBase = GetBaseWithConstantOffset(WritePtr, StoreOffset, TD);
  Value *LoadBase =
    GetBaseWithConstantOffset(LoadPtr, LoadOffset, TD);
  if (StoreBase != LoadBase)
    return -1;

  // If the load and store are to the exact same address, they should have been
  // a must alias.  AA must have gotten confused.
  // FIXME: Study to see if/when this happens.
  if (LoadOffset == StoreOffset) {
#if 0
    errs() << "STORE/LOAD DEP WITH COMMON POINTER MISSED:\n"
    << "Base = " << *StoreBase << "\n"
    << "Store Ptr = " << *WritePtr << "\n"
    << "Store Offs = " << StoreOffset << "\n"
    << "Load Ptr = " << *LoadPtr << "\n"
    << "Load Offs = " << LoadOffset << " - " << *L << "\n\n";
    abort();
#endif
    return -1;
  }

  // If the load and store don't overlap at all, the store doesn't provide
  // anything to the load.  In this case, they really don't alias at all, AA
  // must have gotten confused.
  // FIXME: Investigate cases where this bails out, e.g. rdar://7238614. Then
  // remove this check, as it is duplicated with what we have below.
  uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy);

  if ((WriteSizeInBits & 7) | (LoadSize & 7))
    return -1;
  uint64_t StoreSize = WriteSizeInBits >> 3;  // Convert to bytes.
  LoadSize >>= 3;


  bool isAAFailure = false;
  if (StoreOffset < LoadOffset) {
    isAAFailure = StoreOffset+int64_t(StoreSize) <= LoadOffset;
  } else {
    isAAFailure = LoadOffset+int64_t(LoadSize) <= StoreOffset;
  }
  if (isAAFailure) {
#if 0
    errs() << "STORE LOAD DEP WITH COMMON BASE:\n"
    << "Base = " << *StoreBase << "\n"
    << "Store Ptr = " << *WritePtr << "\n"
    << "Store Offs = " << StoreOffset << "\n"
    << "Load Ptr = " << *L->getPointerOperand() << "\n"
    << "Load Offs = " << LoadOffset << " - " << *L << "\n\n";
    errs() << "'" << L->getParent()->getParent()->getName() << "'"
    << *L->getParent();
    abort();
#endif
    return -1;
  }

  // If the Load isn't completely contained within the stored bits, we don't
  // have all the bits to feed it.  We could do something crazy in the future
  // (issue a smaller load then merge the bits in) but this seems unlikely to be
  // valuable.
  if (StoreOffset > LoadOffset ||
      StoreOffset+StoreSize < LoadOffset+LoadSize)
    return -1;

  // Okay, we can do this transformation.  Return the number of bytes into the
  // store that the load is.
  return LoadOffset-StoreOffset;
}

/// AnalyzeLoadFromClobberingStore - This function is called when we have a
/// memdep query of a load that ends up being a clobbering store.
static int AnalyzeLoadFromClobberingStore(const Type *LoadTy, Value *LoadPtr,
                                          StoreInst *DepSI,
                                          const TargetData &TD) {
  // Cannot handle reading from store of first-class aggregate yet.
  if (isa<StructType>(DepSI->getOperand(0)->getType()) ||
      isa<ArrayType>(DepSI->getOperand(0)->getType()))
    return -1;

  Value *StorePtr = DepSI->getPointerOperand();
  uint64_t StoreSize = TD.getTypeSizeInBits(StorePtr->getType());
  return AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr,
                                        StorePtr, StoreSize, TD);
}

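/// AnalyzeLoadFromClobberingMemInst - This function is called when we have a
/// memdep query of a load that ends up being clobbered by a memory intrinsic.
/// Handles memset, and memcpy/memmove whose source is a constant global.
/// Returns the byte offset of the load within the written bytes, or -1 if the
/// value cannot be recovered.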
static int AnalyzeLoadFromClobberingMemInst(const Type *LoadTy, Value *LoadPtr,
                                            MemIntrinsic *MI,
                                            const TargetData &TD) {
  // If the mem operation is a non-constant size, we can't handle it.
  ConstantInt *SizeCst = dyn_cast<ConstantInt>(MI->getLength());
  if (SizeCst == 0) return -1;
  uint64_t MemSizeInBits = SizeCst->getZExtValue()*8;

  // If this is memset, we just need to see if the offset is valid in the size
  // of the memset.
Chris Lattnercb00f732009-12-06 01:57:02 +00001106 if (MI->getIntrinsicID() == Intrinsic::memset)
Chris Lattnerfd9feaa2009-12-09 07:37:07 +00001107 return AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr, MI->getDest(),
1108 MemSizeInBits, TD);
Chris Lattnercb00f732009-12-06 01:57:02 +00001109
Chris Lattner4bb632f2009-12-06 05:29:56 +00001110 // If we have a memcpy/memmove, the only case we can handle is if this is a
1111 // copy from constant memory. In that case, we can read directly from the
1112 // constant memory.
1113 MemTransferInst *MTI = cast<MemTransferInst>(MI);
1114
1115 Constant *Src = dyn_cast<Constant>(MTI->getSource());
1116 if (Src == 0) return -1;
1117
1118 GlobalVariable *GV = dyn_cast<GlobalVariable>(Src->getUnderlyingObject());
1119 if (GV == 0 || !GV->isConstant()) return -1;
1120
1121 // See if the access is within the bounds of the transfer.
Chris Lattnerfd9feaa2009-12-09 07:37:07 +00001122 int Offset = AnalyzeLoadFromClobberingWrite(LoadTy, LoadPtr,
1123 MI->getDest(), MemSizeInBits, TD);
Chris Lattner4bb632f2009-12-06 05:29:56 +00001124 if (Offset == -1)
1125 return Offset;
1126
1127 // Otherwise, see if we can constant fold a load from the constant with the
1128 // offset applied as appropriate.
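  // (For instance, if Src is a constant global and Offset is 4, the bitcast
  //  and GEP below form an i8* pointing 4 bytes into the initializer, and the
  //  constant folder can then read the loaded value out of the constant data.)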
1129 Src = ConstantExpr::getBitCast(Src,
1130 llvm::Type::getInt8PtrTy(Src->getContext()));
1131 Constant *OffsetCst =
1132 ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
1133 Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
Chris Lattnerfd9feaa2009-12-09 07:37:07 +00001134 Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(LoadTy));
Chris Lattner4bb632f2009-12-06 05:29:56 +00001135 if (ConstantFoldLoadFromConstPtr(Src, &TD))
1136 return Offset;
Chris Lattnercb00f732009-12-06 01:57:02 +00001137 return -1;
1138}
1139
Chris Lattner8f912082009-09-21 06:24:16 +00001140
1141 /// GetStoreValueForLoad - This function is called when we have a
1142 /// memdep query of a load that ends up being a clobbering store which has
1143 /// already been shown to provide the bits the load needs. SrcVal is the
1144 /// value providing those bits and Offset is the byte offset into it at which
1145 /// the load's bits start; extract them and coerce the result to LoadTy.
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001146static Value *GetStoreValueForLoad(Value *SrcVal, unsigned Offset,
1147 const Type *LoadTy,
1148 Instruction *InsertPt, const TargetData &TD){
Chris Lattner8f912082009-09-21 06:24:16 +00001149 LLVMContext &Ctx = SrcVal->getType()->getContext();
1150
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001151 uint64_t StoreSize = TD.getTypeSizeInBits(SrcVal->getType())/8;
1152 uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy)/8;
Chris Lattner8f912082009-09-21 06:24:16 +00001153
Chris Lattner2737cb42009-12-09 18:13:28 +00001154 IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
Chris Lattner8f912082009-09-21 06:24:16 +00001155
1156 // Compute which bits of the stored value are being used by the load. Convert
1157 // to an integer type to start with.
1158 if (isa<PointerType>(SrcVal->getType()))
Chris Lattner2737cb42009-12-09 18:13:28 +00001159 SrcVal = Builder.CreatePtrToInt(SrcVal, TD.getIntPtrType(Ctx), "tmp");
Chris Lattner8f912082009-09-21 06:24:16 +00001160 if (!isa<IntegerType>(SrcVal->getType()))
Chris Lattner2737cb42009-12-09 18:13:28 +00001161 SrcVal = Builder.CreateBitCast(SrcVal, IntegerType::get(Ctx, StoreSize*8),
1162 "tmp");
Chris Lattner8f912082009-09-21 06:24:16 +00001163
1164 // Shift the bits to the least significant depending on endianness.
1165 unsigned ShiftAmt;
Chris Lattnercb00f732009-12-06 01:57:02 +00001166 if (TD.isLittleEndian())
Chris Lattner8f912082009-09-21 06:24:16 +00001167 ShiftAmt = Offset*8;
Chris Lattnercb00f732009-12-06 01:57:02 +00001168 else
Chris Lattner1846fa02009-09-21 17:55:47 +00001169 ShiftAmt = (StoreSize-LoadSize-Offset)*8;
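  // (Example: StoreSize = 4, LoadSize = 1, Offset = 1 gives ShiftAmt = 8 on a
  //  little-endian target and 16 on a big-endian one; either way the wanted
  //  byte ends up in the least significant bits after the shift below.)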
Chris Lattner8f912082009-09-21 06:24:16 +00001170
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001171 if (ShiftAmt)
Chris Lattner2737cb42009-12-09 18:13:28 +00001172 SrcVal = Builder.CreateLShr(SrcVal, ShiftAmt, "tmp");
Chris Lattner8f912082009-09-21 06:24:16 +00001173
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001174 if (LoadSize != StoreSize)
Chris Lattner2737cb42009-12-09 18:13:28 +00001175 SrcVal = Builder.CreateTrunc(SrcVal, IntegerType::get(Ctx, LoadSize*8),
1176 "tmp");
Chris Lattner8f912082009-09-21 06:24:16 +00001177
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001178 return CoerceAvailableValueToLoadType(SrcVal, LoadTy, InsertPt, TD);
Chris Lattner8f912082009-09-21 06:24:16 +00001179}
1180
Chris Lattnercb00f732009-12-06 01:57:02 +00001181/// GetMemInstValueForLoad - This function is called when we have a
1182/// memdep query of a load that ends up being a clobbering mem intrinsic.
1183static Value *GetMemInstValueForLoad(MemIntrinsic *SrcInst, unsigned Offset,
1184 const Type *LoadTy, Instruction *InsertPt,
1185 const TargetData &TD){
1186 LLVMContext &Ctx = LoadTy->getContext();
1187 uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy)/8;
1188
1189 IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
1190
1191 // We know that this method is only called when the mem transfer fully
1192 // provides the bits for the load.
1193 if (MemSetInst *MSI = dyn_cast<MemSetInst>(SrcInst)) {
1194 // memset(P, 'x', 1234) -> splat('x'), even if x is a variable, and
1195 // regardless of what the offset is.
1196 Value *Val = MSI->getValue();
1197 if (LoadSize != 1)
1198 Val = Builder.CreateZExt(Val, IntegerType::get(Ctx, LoadSize*8));
1199
1200 Value *OneElt = Val;
1201
1202 // Splat the value out to the right number of bits.
1203 for (unsigned NumBytesSet = 1; NumBytesSet != LoadSize; ) {
1204 // If we can double the number of bytes set, do it.
1205 if (NumBytesSet*2 <= LoadSize) {
1206 Value *ShVal = Builder.CreateShl(Val, NumBytesSet*8);
1207 Val = Builder.CreateOr(Val, ShVal);
1208 NumBytesSet <<= 1;
1209 continue;
1210 }
1211
1212 // Otherwise insert one byte at a time.
1213 Value *ShVal = Builder.CreateShl(Val, 1*8);
1214 Val = Builder.CreateOr(OneElt, ShVal);
1215 ++NumBytesSet;
1216 }
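    // (Example: for LoadSize = 4 the doubling branch takes the splat from 1 to
    //  2 to 4 bytes; for LoadSize = 3 it doubles to 2 bytes and the one-byte
    //  branch then fills in the remaining byte.)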
1217
1218 return CoerceAvailableValueToLoadType(Val, LoadTy, InsertPt, TD);
1219 }
Chris Lattner4bb632f2009-12-06 05:29:56 +00001220
1221 // Otherwise, this is a memcpy/memmove from a constant global.
1222 MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);
1223 Constant *Src = cast<Constant>(MTI->getSource());
1224
1225 // Otherwise, see if we can constant fold a load from the constant with the
1226 // offset applied as appropriate.
1227 Src = ConstantExpr::getBitCast(Src,
1228 llvm::Type::getInt8PtrTy(Src->getContext()));
1229 Constant *OffsetCst =
1230 ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
1231 Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
1232 Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(LoadTy));
1233 return ConstantFoldLoadFromConstPtr(Src, &TD);
Chris Lattnercb00f732009-12-06 01:57:02 +00001234}
1235
1236
1237
Chris Lattner19b84b32009-09-21 06:30:24 +00001238struct AvailableValueInBlock {
1239 /// BB - The basic block in question.
1240 BasicBlock *BB;
Chris Lattnera96e53a2009-12-06 04:54:31 +00001241 enum ValType {
1242 SimpleVal, // A simple offsetted value that is accessed.
1243 MemIntrin // A memory intrinsic which is loaded from.
1244 };
1245
Chris Lattner19b84b32009-09-21 06:30:24 +00001246 /// V - The value that is live out of the block.
Chris Lattnera96e53a2009-12-06 04:54:31 +00001247 PointerIntPair<Value *, 1, ValType> Val;
1248
1249 /// Offset - The byte offset in Val that is interesting for the load query.
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001250 unsigned Offset;
Chris Lattner19b84b32009-09-21 06:30:24 +00001251
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001252 static AvailableValueInBlock get(BasicBlock *BB, Value *V,
1253 unsigned Offset = 0) {
Chris Lattner19b84b32009-09-21 06:30:24 +00001254 AvailableValueInBlock Res;
1255 Res.BB = BB;
Chris Lattnera96e53a2009-12-06 04:54:31 +00001256 Res.Val.setPointer(V);
1257 Res.Val.setInt(SimpleVal);
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001258 Res.Offset = Offset;
Chris Lattner19b84b32009-09-21 06:30:24 +00001259 return Res;
1260 }
Chris Lattnera96e53a2009-12-06 04:54:31 +00001261
1262 static AvailableValueInBlock getMI(BasicBlock *BB, MemIntrinsic *MI,
1263 unsigned Offset = 0) {
1264 AvailableValueInBlock Res;
1265 Res.BB = BB;
1266 Res.Val.setPointer(MI);
1267 Res.Val.setInt(MemIntrin);
1268 Res.Offset = Offset;
1269 return Res;
1270 }
1271
1272 bool isSimpleValue() const { return Val.getInt() == SimpleVal; }
1273 Value *getSimpleValue() const {
1274 assert(isSimpleValue() && "Wrong accessor");
1275 return Val.getPointer();
1276 }
1277
1278 MemIntrinsic *getMemIntrinValue() const {
1279 assert(!isSimpleValue() && "Wrong accessor");
1280 return cast<MemIntrinsic>(Val.getPointer());
1281 }
Chris Lattner19b84b32009-09-21 06:30:24 +00001282};
1283
Chris Lattner6e5ea272009-10-10 23:50:30 +00001284/// ConstructSSAForLoadSet - Given a set of loads specified by ValuesPerBlock,
1285/// construct SSA form, allowing us to eliminate LI. This returns the value
1286/// that should be used at LI's definition site.
1287static Value *ConstructSSAForLoadSet(LoadInst *LI,
1288 SmallVectorImpl<AvailableValueInBlock> &ValuesPerBlock,
1289 const TargetData *TD,
1290 AliasAnalysis *AA) {
1291 SmallVector<PHINode*, 8> NewPHIs;
1292 SSAUpdater SSAUpdate(&NewPHIs);
1293 SSAUpdate.Initialize(LI);
1294
1295 const Type *LoadTy = LI->getType();
1296
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001297 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
Chris Lattnera96e53a2009-12-06 04:54:31 +00001298 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1299 BasicBlock *BB = AV.BB;
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001300
Chris Lattner6e5ea272009-10-10 23:50:30 +00001301 if (SSAUpdate.HasValueForBlock(BB))
1302 continue;
Chris Lattnera96e53a2009-12-06 04:54:31 +00001303
1304 unsigned Offset = AV.Offset;
1305
1306 Value *AvailableVal;
1307 if (AV.isSimpleValue()) {
1308 AvailableVal = AV.getSimpleValue();
1309 if (AvailableVal->getType() != LoadTy) {
1310 assert(TD && "Need target data to handle type mismatch case");
1311 AvailableVal = GetStoreValueForLoad(AvailableVal, Offset, LoadTy,
1312 BB->getTerminator(), *TD);
1313
1314 DEBUG(errs() << "GVN COERCED NONLOCAL VAL:\nOffset: " << Offset << " "
1315 << *AV.getSimpleValue() << '\n'
Chris Lattner6e5ea272009-10-10 23:50:30 +00001316 << *AvailableVal << '\n' << "\n\n\n");
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001317 }
Chris Lattnera96e53a2009-12-06 04:54:31 +00001318 } else {
1319 AvailableVal = GetMemInstValueForLoad(AV.getMemIntrinValue(), Offset,
1320 LoadTy, BB->getTerminator(), *TD);
1321 DEBUG(errs() << "GVN COERCED NONLOCAL MEM INTRIN:\nOffset: " << Offset
1322 << " " << *AV.getMemIntrinValue() << '\n'
Chris Lattner6e5ea272009-10-10 23:50:30 +00001323 << *AvailableVal << '\n' << "\n\n\n");
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001324 }
Chris Lattner6e5ea272009-10-10 23:50:30 +00001325 SSAUpdate.AddAvailableValue(BB, AvailableVal);
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001326 }
Chris Lattner6e5ea272009-10-10 23:50:30 +00001327
1328 // Perform PHI construction.
1329 Value *V = SSAUpdate.GetValueInMiddleOfBlock(LI->getParent());
1330
1331 // If new PHI nodes were created, notify alias analysis.
1332 if (isa<PointerType>(V->getType()))
1333 for (unsigned i = 0, e = NewPHIs.size(); i != e; ++i)
1334 AA->copyValue(LI, NewPHIs[i]);
1335
1336 return V;
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001337}
1338
Owen Andersonf187daf2009-12-02 07:35:19 +00001339static bool isLifetimeStart(Instruction *Inst) {
Chris Lattnerbc6fccc2009-12-02 06:44:58 +00001340 if (IntrinsicInst* II = dyn_cast<IntrinsicInst>(Inst))
Owen Andersonf187daf2009-12-02 07:35:19 +00001341 return II->getIntrinsicID() == Intrinsic::lifetime_start;
Chris Lattnerbc6fccc2009-12-02 06:44:58 +00001342 return false;
1343}
1344
Owen Andersone0143452007-08-16 22:02:55 +00001345/// processNonLocalLoad - Attempt to eliminate a load whose dependencies are
1346/// non-local by performing PHI construction.
Chris Lattnerdcded152008-12-02 08:16:11 +00001347bool GVN::processNonLocalLoad(LoadInst *LI,
Chris Lattner7de20452008-03-21 22:01:16 +00001348 SmallVectorImpl<Instruction*> &toErase) {
Chris Lattnerdcded152008-12-02 08:16:11 +00001349 // Find the non-local dependencies of the load.
Chris Lattner1a957962009-12-09 07:08:01 +00001350 SmallVector<NonLocalDepEntry, 64> Deps;
Chris Lattneraf713862008-12-09 19:25:07 +00001351 MD->getNonLocalPointerDependency(LI->getOperand(0), true, LI->getParent(),
1352 Deps);
Dan Gohman7e124382009-07-31 20:24:18 +00001353 //DEBUG(errs() << "INVESTIGATING NONLOCAL LOAD: "
1354 // << Deps.size() << *LI << '\n');
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001355
Owen Anderson90e717d2008-08-26 22:07:42 +00001356 // If we had to process more than one hundred blocks to find the
1357 // dependencies, this load isn't worth worrying about. Optimizing
1358 // it will be too expensive.
Chris Lattneraf713862008-12-09 19:25:07 +00001359 if (Deps.size() > 100)
Owen Anderson90e717d2008-08-26 22:07:42 +00001360 return false;
Chris Lattner8d1686f2008-12-18 00:51:32 +00001361
1362 // If we had a phi translation failure, we'll have a single entry which is a
1363 // clobber in the current block. Reject this early.
Chris Lattner1a957962009-12-09 07:08:01 +00001364 if (Deps.size() == 1 && Deps[0].getResult().isClobber()) {
Edwin Török3ffffac2009-06-17 18:48:18 +00001365 DEBUG(
Dan Gohman0be10b02009-07-25 01:43:01 +00001366 errs() << "GVN: non-local load ";
1367 WriteAsOperand(errs(), LI);
Chris Lattner1a957962009-12-09 07:08:01 +00001368 errs() << " is clobbered by " << *Deps[0].getResult().getInst() << '\n';
Edwin Török3ffffac2009-06-17 18:48:18 +00001369 );
Chris Lattner8d1686f2008-12-18 00:51:32 +00001370 return false;
Edwin Török3ffffac2009-06-17 18:48:18 +00001371 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001372
Chris Lattnerdcded152008-12-02 08:16:11 +00001373 // Filter out useless results (non-locals, etc). Keep track of the blocks
1374 // where we have a value available in ValuesPerBlock, and keep track of whether we see
1375 // dependencies that produce an unknown value for the load (such as a call
1376 // that could potentially clobber the load).
Chris Lattner19b84b32009-09-21 06:30:24 +00001377 SmallVector<AvailableValueInBlock, 16> ValuesPerBlock;
Chris Lattnerdcded152008-12-02 08:16:11 +00001378 SmallVector<BasicBlock*, 16> UnavailableBlocks;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001379
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001380 const TargetData *TD = 0;
1381
Chris Lattneraf713862008-12-09 19:25:07 +00001382 for (unsigned i = 0, e = Deps.size(); i != e; ++i) {
Chris Lattner1a957962009-12-09 07:08:01 +00001383 BasicBlock *DepBB = Deps[i].getBB();
1384 MemDepResult DepInfo = Deps[i].getResult();
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001385
Chris Lattner4531da82008-12-05 21:04:20 +00001386 if (DepInfo.isClobber()) {
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001387 // If the dependence is to a store that writes to a superset of the bits
1388 // read by the load, we can extract the bits we need for the load from the
1389 // stored value.
1390 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInfo.getInst())) {
1391 if (TD == 0)
1392 TD = getAnalysisIfAvailable<TargetData>();
1393 if (TD) {
Chris Lattnerfd9feaa2009-12-09 07:37:07 +00001394 int Offset = AnalyzeLoadFromClobberingStore(LI->getType(),
1395 LI->getPointerOperand(),
1396 DepSI, *TD);
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001397 if (Offset != -1) {
1398 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1399 DepSI->getOperand(0),
1400 Offset));
1401 continue;
1402 }
1403 }
1404 }
Chris Lattnercb00f732009-12-06 01:57:02 +00001405
Chris Lattnercb00f732009-12-06 01:57:02 +00001406 // If the clobbering value is a memset/memcpy/memmove, see if we can
1407 // forward a value on from it.
Chris Lattnera96e53a2009-12-06 04:54:31 +00001408 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(DepInfo.getInst())) {
Chris Lattnercb00f732009-12-06 01:57:02 +00001409 if (TD == 0)
1410 TD = getAnalysisIfAvailable<TargetData>();
1411 if (TD) {
Chris Lattnerfd9feaa2009-12-09 07:37:07 +00001412 int Offset = AnalyzeLoadFromClobberingMemInst(LI->getType(),
1413 LI->getPointerOperand(),
1414 DepMI, *TD);
Chris Lattnera96e53a2009-12-06 04:54:31 +00001415 if (Offset != -1) {
1416 ValuesPerBlock.push_back(AvailableValueInBlock::getMI(DepBB, DepMI,
1417 Offset));
1418 continue;
1419 }
Chris Lattnercb00f732009-12-06 01:57:02 +00001420 }
1421 }
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001422
Chris Lattner4531da82008-12-05 21:04:20 +00001423 UnavailableBlocks.push_back(DepBB);
1424 continue;
1425 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001426
Chris Lattner4531da82008-12-05 21:04:20 +00001427 Instruction *DepInst = DepInfo.getInst();
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001428
Chris Lattner4531da82008-12-05 21:04:20 +00001429 // Loading the allocation -> undef.
Chris Lattnerbc6fccc2009-12-02 06:44:58 +00001430 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst) ||
Owen Andersonf187daf2009-12-02 07:35:19 +00001431 // Loading immediately after lifetime begin -> undef.
1432 isLifetimeStart(DepInst)) {
Chris Lattner19b84b32009-09-21 06:30:24 +00001433 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1434 UndefValue::get(LI->getType())));
Chris Lattner46876282008-12-01 01:15:42 +00001435 continue;
1436 }
Owen Andersonc07861a2009-10-28 07:05:35 +00001437
Chris Lattner19b84b32009-09-21 06:30:24 +00001438 if (StoreInst *S = dyn_cast<StoreInst>(DepInst)) {
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001439 // Reject loads and stores that are to the same address but are of
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001440 // different types if we have to.
Chris Lattnerdcded152008-12-02 08:16:11 +00001441 if (S->getOperand(0)->getType() != LI->getType()) {
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001442 if (TD == 0)
1443 TD = getAnalysisIfAvailable<TargetData>();
1444
1445 // If the stored value is larger than or equal to the loaded value, we can
1446 // reuse it.
Chris Lattner012b3602009-09-21 17:24:04 +00001447 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(S->getOperand(0),
1448 LI->getType(), *TD)) {
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001449 UnavailableBlocks.push_back(DepBB);
1450 continue;
1451 }
Chris Lattnerdcded152008-12-02 08:16:11 +00001452 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001453
Chris Lattner19b84b32009-09-21 06:30:24 +00001454 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1455 S->getOperand(0)));
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001456 continue;
1457 }
1458
1459 if (LoadInst *LD = dyn_cast<LoadInst>(DepInst)) {
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001460 // If the types mismatch and we can't handle it, reject reuse of the load.
Chris Lattnerdcded152008-12-02 08:16:11 +00001461 if (LD->getType() != LI->getType()) {
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001462 if (TD == 0)
1463 TD = getAnalysisIfAvailable<TargetData>();
1464
1465 // If the previously loaded value is larger than or equal to this one, we can
1466 // reuse it.
Chris Lattner012b3602009-09-21 17:24:04 +00001467 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(LD, LI->getType(),*TD)){
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001468 UnavailableBlocks.push_back(DepBB);
1469 continue;
1470 }
Chris Lattnerdcded152008-12-02 08:16:11 +00001471 }
Chris Lattner19b84b32009-09-21 06:30:24 +00001472 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB, LD));
Chris Lattnerdcded152008-12-02 08:16:11 +00001473 continue;
Owen Anderson5d72a422007-07-25 19:57:03 +00001474 }
Chris Lattneraae7fcb2009-09-21 06:48:08 +00001475
1476 UnavailableBlocks.push_back(DepBB);
1477 continue;
Chris Lattner3d7103e2008-03-21 21:14:38 +00001478 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001479
Chris Lattnerdcded152008-12-02 08:16:11 +00001480 // If we have no predecessors that produce a known value for this load, exit
1481 // early.
1482 if (ValuesPerBlock.empty()) return false;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001483
Chris Lattnerdcded152008-12-02 08:16:11 +00001484 // If all of the instructions we depend on produce a known value for this
1485 // load, then it is fully redundant and we can use PHI insertion to compute
1486 // its value. Insert PHIs and remove the fully redundant value now.
1487 if (UnavailableBlocks.empty()) {
Dan Gohman7e124382009-07-31 20:24:18 +00001488 DEBUG(errs() << "GVN REMOVING NONLOCAL LOAD: " << *LI << '\n');
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001489
Chris Lattnerdcded152008-12-02 08:16:11 +00001490 // Perform PHI construction.
Chris Lattner6e5ea272009-10-10 23:50:30 +00001491 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD,
1492 VN.getAliasAnalysis());
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001493 LI->replaceAllUsesWith(V);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001494
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001495 if (isa<PHINode>(V))
1496 V->takeName(LI);
1497 if (isa<PointerType>(V->getType()))
1498 MD->invalidateCachedPointerInfo(V);
Chris Lattnerdcded152008-12-02 08:16:11 +00001499 toErase.push_back(LI);
1500 NumGVNLoad++;
1501 return true;
1502 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001503
Chris Lattnerdcded152008-12-02 08:16:11 +00001504 if (!EnablePRE || !EnableLoadPRE)
1505 return false;
1506
1507 // Okay, we have *some* definitions of the value. This means that the value
1508 // is available in some of our (transitive) predecessors. Let's think about
1509 // doing PRE of this load. This will involve inserting a new load into the
1510 // predecessor when it's not available. We could do this in general, but
1511 // prefer to not increase code size. As such, we only do this when we know
1512 // that we only have to insert *one* load (which means we're basically moving
1513 // the load, not inserting a new one).
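  // (Sketch of the shape we are after: a block with two predecessors where the
  //  loaded value is already available in one of them; we may then insert a
  //  single reload in the other predecessor and PHI the values together.)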
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001514
Owen Andersondd37b182009-05-31 09:03:40 +00001515 SmallPtrSet<BasicBlock *, 4> Blockers;
1516 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1517 Blockers.insert(UnavailableBlocks[i]);
1518
1519 // Let's find the first basic block with more than one predecessor. Walk backwards
1520 // through predecessors if needed.
Chris Lattnerdcded152008-12-02 08:16:11 +00001521 BasicBlock *LoadBB = LI->getParent();
Owen Andersondd37b182009-05-31 09:03:40 +00001522 BasicBlock *TmpBB = LoadBB;
1523
1524 bool isSinglePred = false;
Dale Johannesena19b67f2009-06-17 20:48:23 +00001525 bool allSingleSucc = true;
Owen Andersondd37b182009-05-31 09:03:40 +00001526 while (TmpBB->getSinglePredecessor()) {
1527 isSinglePred = true;
1528 TmpBB = TmpBB->getSinglePredecessor();
1529 if (!TmpBB) // If haven't found any, bail now.
1530 return false;
1531 if (TmpBB == LoadBB) // Infinite (unreachable) loop.
1532 return false;
1533 if (Blockers.count(TmpBB))
1534 return false;
Dale Johannesena19b67f2009-06-17 20:48:23 +00001535 if (TmpBB->getTerminator()->getNumSuccessors() != 1)
1536 allSingleSucc = false;
Owen Andersondd37b182009-05-31 09:03:40 +00001537 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001538
Owen Andersondd37b182009-05-31 09:03:40 +00001539 assert(TmpBB);
1540 LoadBB = TmpBB;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001541
Chris Lattnerdcded152008-12-02 08:16:11 +00001542 // If ValuesPerBlock contains LI itself, this means we have a loop where
1543 // at least one of the values is LI. Since this means that we won't be able
1544 // to eliminate LI even if we insert uses in the other predecessors, we will
1545 // end up increasing code size. Reject this by scanning for LI.
1546 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i)
Chris Lattnera96e53a2009-12-06 04:54:31 +00001547 if (ValuesPerBlock[i].isSimpleValue() &&
1548 ValuesPerBlock[i].getSimpleValue() == LI)
Chris Lattnerdcded152008-12-02 08:16:11 +00001549 return false;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001550
Chris Lattnera96e53a2009-12-06 04:54:31 +00001551 // FIXME: It is extremely unclear what this loop is doing, other than
1552 // artificially restricting loadpre.
Owen Andersondd37b182009-05-31 09:03:40 +00001553 if (isSinglePred) {
1554 bool isHot = false;
Chris Lattnera96e53a2009-12-06 04:54:31 +00001555 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
1556 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1557 if (AV.isSimpleValue())
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001558 // "Hot" Instruction is in some loop (because it dominates its dep.
1559 // instruction).
Chris Lattnera96e53a2009-12-06 04:54:31 +00001560 if (Instruction *I = dyn_cast<Instruction>(AV.getSimpleValue()))
1561 if (DT->dominates(LI, I)) {
1562 isHot = true;
1563 break;
1564 }
1565 }
Owen Andersondd37b182009-05-31 09:03:40 +00001566
1567 // We are interested only in "hot" instructions. We don't want to do any
1568 // mis-optimizations here.
1569 if (!isHot)
1570 return false;
1571 }
1572
Chris Lattnerdcded152008-12-02 08:16:11 +00001573 // Okay, we have some hope :). Check to see if the loaded value is fully
1574 // available in all but one predecessor.
1575 // FIXME: If we could restructure the CFG, we could make a common pred with
1576 // all the preds that don't have an available LI and insert a new load into
1577 // that one block.
1578 BasicBlock *UnavailablePred = 0;
1579
Chris Lattner159b98f2008-12-05 07:49:08 +00001580 DenseMap<BasicBlock*, char> FullyAvailableBlocks;
Chris Lattnerdcded152008-12-02 08:16:11 +00001581 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i)
Chris Lattner19b84b32009-09-21 06:30:24 +00001582 FullyAvailableBlocks[ValuesPerBlock[i].BB] = true;
Chris Lattnerdcded152008-12-02 08:16:11 +00001583 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1584 FullyAvailableBlocks[UnavailableBlocks[i]] = false;
1585
1586 for (pred_iterator PI = pred_begin(LoadBB), E = pred_end(LoadBB);
1587 PI != E; ++PI) {
1588 if (IsValueFullyAvailableInBlock(*PI, FullyAvailableBlocks))
1589 continue;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001590
Chris Lattnerdcded152008-12-02 08:16:11 +00001591 // If this load is not available in multiple predecessors, reject it.
1592 if (UnavailablePred && UnavailablePred != *PI)
1593 return false;
1594 UnavailablePred = *PI;
1595 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001596
Chris Lattnerdcded152008-12-02 08:16:11 +00001597 assert(UnavailablePred != 0 &&
1598 "Fully available value should be eliminated above!");
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001599
Chris Lattnerdcded152008-12-02 08:16:11 +00001600 // We don't currently handle critical edges :(
1601 if (UnavailablePred->getTerminator()->getNumSuccessors() != 1) {
Daniel Dunbar005975c2009-07-25 00:23:56 +00001602 DEBUG(errs() << "COULD NOT PRE LOAD BECAUSE OF CRITICAL EDGE '"
Dan Gohman7e124382009-07-31 20:24:18 +00001603 << UnavailablePred->getName() << "': " << *LI << '\n');
Chris Lattnerdcded152008-12-02 08:16:11 +00001604 return false;
Owen Anderson5b299672007-08-07 23:12:31 +00001605 }
Chris Lattner248818e2009-11-27 08:25:10 +00001606
Chris Lattnera5bef152009-11-27 22:05:15 +00001607 // Do PHI translation to get its value in the predecessor if necessary. The
1608 // returned pointer (if non-null) is guaranteed to dominate UnavailablePred.
1609 //
Chris Lattner1c2de2b2009-11-28 15:39:14 +00001610 SmallVector<Instruction*, 8> NewInsts;
Chris Lattnerde0b0302009-11-27 22:50:07 +00001611
Chris Lattner80c535b2009-11-28 16:08:18 +00001612 // If all preds have a single successor, then we know it is safe to insert the
1613 // load on the pred (?!?), so we can insert code to materialize the pointer if
1614 // it is not available.
Chris Lattnerefff3222009-12-09 01:59:31 +00001615 PHITransAddr Address(LI->getOperand(0), TD);
1616 Value *LoadPtr = 0;
Chris Lattner80c535b2009-11-28 16:08:18 +00001617 if (allSingleSucc) {
Chris Lattnerefff3222009-12-09 01:59:31 +00001618 LoadPtr = Address.PHITranslateWithInsertion(LoadBB, UnavailablePred,
1619 *DT, NewInsts);
Chris Lattner80c535b2009-11-28 16:08:18 +00001620 } else {
Chris Lattnerefff3222009-12-09 01:59:31 +00001621 Address.PHITranslateValue(LoadBB, UnavailablePred);
1622 LoadPtr = Address.getAddr();
1623
1624 // Make sure the value is live in the predecessor.
1625 if (Instruction *Inst = dyn_cast_or_null<Instruction>(LoadPtr))
1626 if (!DT->dominates(Inst->getParent(), UnavailablePred))
1627 LoadPtr = 0;
1628 }
1629
1630 // If we couldn't find or insert a computation of this phi translated value,
1631 // we fail PRE.
1632 if (LoadPtr == 0) {
1633 assert(NewInsts.empty() && "Shouldn't insert insts on failure");
1634 DEBUG(errs() << "COULDN'T INSERT PHI TRANSLATED VALUE OF: "
1635 << *LI->getOperand(0) << "\n");
1636 return false;
Chris Lattner80c535b2009-11-28 16:08:18 +00001637 }
Owen Anderson2c405b92009-12-03 03:43:29 +00001638
1639 // Assign value numbers to these new instructions.
Chris Lattnerefff3222009-12-09 01:59:31 +00001640 for (unsigned i = 0, e = NewInsts.size(); i != e; ++i) {
Owen Anderson2c405b92009-12-03 03:43:29 +00001641 // FIXME: We really _ought_ to insert these value numbers into their
1642 // parent's availability map. However, in doing so, we risk getting into
1643 // ordering issues. If a block hasn't been processed yet, we would be
1644 // marking a value as AVAIL-IN, which isn't what we intend.
Chris Lattnerefff3222009-12-09 01:59:31 +00001645 VN.lookup_or_add(NewInsts[i]);
Chris Lattner248818e2009-11-27 08:25:10 +00001646 }
1647
Dale Johannesena19b67f2009-06-17 20:48:23 +00001648 // Make sure it is valid to move this load here. We have to watch out for:
1649 // @1 = getelementptr (i8* p, ...
1650 // test p and branch if == 0
1651 // load @1
1652 // It is valid to have the getelementptr before the test, even if p can be 0,
1653 // as getelementptr only does address arithmetic.
1654 // If we are not pushing the value through any multiple-successor blocks
1655 // we do not have this case. Otherwise, check that the load is safe to
1656 // put anywhere; this can be improved, but should be conservatively safe.
1657 if (!allSingleSucc &&
Chris Lattner1c2de2b2009-11-28 15:39:14 +00001658 // FIXME: REEVALUATE THIS.
Chris Lattner80c535b2009-11-28 16:08:18 +00001659 !isSafeToLoadUnconditionally(LoadPtr, UnavailablePred->getTerminator())) {
1660 assert(NewInsts.empty() && "Should not have inserted instructions");
Dale Johannesena19b67f2009-06-17 20:48:23 +00001661 return false;
Chris Lattner80c535b2009-11-28 16:08:18 +00001662 }
Dale Johannesena19b67f2009-06-17 20:48:23 +00001663
Chris Lattnerdcded152008-12-02 08:16:11 +00001664 // Okay, we can eliminate this load by inserting a reload in the predecessor
1665 // and using PHI construction to get the value in the other predecessors, do
1666 // it.
Dan Gohman7e124382009-07-31 20:24:18 +00001667 DEBUG(errs() << "GVN REMOVING PRE LOAD: " << *LI << '\n');
Chris Lattner80c535b2009-11-28 16:08:18 +00001668 DEBUG(if (!NewInsts.empty())
1669 errs() << "INSERTED " << NewInsts.size() << " INSTS: "
1670 << *NewInsts.back() << '\n');
1671
Chris Lattnerdcded152008-12-02 08:16:11 +00001672 Value *NewLoad = new LoadInst(LoadPtr, LI->getName()+".pre", false,
1673 LI->getAlignment(),
1674 UnavailablePred->getTerminator());
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001675
Chris Lattner6e5ea272009-10-10 23:50:30 +00001676 // Add the newly created load.
1677 ValuesPerBlock.push_back(AvailableValueInBlock::get(UnavailablePred,NewLoad));
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001678
Chris Lattnerdcded152008-12-02 08:16:11 +00001679 // Perform PHI construction.
Chris Lattner6e5ea272009-10-10 23:50:30 +00001680 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD,
1681 VN.getAliasAnalysis());
Chris Lattnerd6b1d052009-09-20 20:09:34 +00001682 LI->replaceAllUsesWith(V);
1683 if (isa<PHINode>(V))
1684 V->takeName(LI);
1685 if (isa<PointerType>(V->getType()))
1686 MD->invalidateCachedPointerInfo(V);
Chris Lattnerdcded152008-12-02 08:16:11 +00001687 toErase.push_back(LI);
1688 NumPRELoad++;
Owen Anderson5d72a422007-07-25 19:57:03 +00001689 return true;
1690}
1691
Owen Andersone0143452007-08-16 22:02:55 +00001692/// processLoad - Attempt to eliminate a load, first by eliminating it
1693/// locally, and then attempting non-local elimination if that fails.
Chris Lattner4531da82008-12-05 21:04:20 +00001694bool GVN::processLoad(LoadInst *L, SmallVectorImpl<Instruction*> &toErase) {
Dan Gohmanc8d26652009-11-14 02:27:51 +00001695 if (!MD)
1696 return false;
1697
Chris Lattner4531da82008-12-05 21:04:20 +00001698 if (L->isVolatile())
Owen Anderson85c40642007-07-24 17:55:58 +00001699 return false;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001700
Owen Anderson85c40642007-07-24 17:55:58 +00001701 // ... to a pointer that has been loaded from before...
Chris Lattnerff36c952009-09-21 02:42:51 +00001702 MemDepResult Dep = MD->getDependency(L);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001703
Chris Lattner4531da82008-12-05 21:04:20 +00001704 // If the value isn't available, don't do anything!
Chris Lattnerff36c952009-09-21 02:42:51 +00001705 if (Dep.isClobber()) {
Chris Lattner0907b522009-09-21 05:57:11 +00001706 // Check to see if we have something like this:
Chris Lattner7741aa52009-09-20 19:03:47 +00001707 // store i32 123, i32* %P
1708 // %A = bitcast i32* %P to i8*
1709 // %B = gep i8* %A, i32 1
1710 // %C = load i8* %B
1711 //
1712 // We could do that by recognizing if the clobber instructions are obviously
1713 // a common base + constant offset, and if the previous store (or memset)
1714 // completely covers this load. This sort of thing can happen in bitfield
1715 // access code.
Chris Lattnercb00f732009-12-06 01:57:02 +00001716 Value *AvailVal = 0;
Chris Lattner0907b522009-09-21 05:57:11 +00001717 if (StoreInst *DepSI = dyn_cast<StoreInst>(Dep.getInst()))
Chris Lattner41eb59c2009-09-21 06:22:46 +00001718 if (const TargetData *TD = getAnalysisIfAvailable<TargetData>()) {
Chris Lattnerfd9feaa2009-12-09 07:37:07 +00001719 int Offset = AnalyzeLoadFromClobberingStore(L->getType(),
1720 L->getPointerOperand(),
1721 DepSI, *TD);
Chris Lattnercb00f732009-12-06 01:57:02 +00001722 if (Offset != -1)
1723 AvailVal = GetStoreValueForLoad(DepSI->getOperand(0), Offset,
1724 L->getType(), L, *TD);
Chris Lattner41eb59c2009-09-21 06:22:46 +00001725 }
Chris Lattner0907b522009-09-21 05:57:11 +00001726
Chris Lattnercb00f732009-12-06 01:57:02 +00001727 // If the clobbering value is a memset/memcpy/memmove, see if we can forward
1728 // a value on from it.
1729 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(Dep.getInst())) {
1730 if (const TargetData *TD = getAnalysisIfAvailable<TargetData>()) {
Chris Lattnerfd9feaa2009-12-09 07:37:07 +00001731 int Offset = AnalyzeLoadFromClobberingMemInst(L->getType(),
1732 L->getPointerOperand(),
1733 DepMI, *TD);
Chris Lattnercb00f732009-12-06 01:57:02 +00001734 if (Offset != -1)
1735 AvailVal = GetMemInstValueForLoad(DepMI, Offset, L->getType(), L,*TD);
1736 }
1737 }
1738
1739 if (AvailVal) {
1740 DEBUG(errs() << "GVN COERCED INST:\n" << *Dep.getInst() << '\n'
1741 << *AvailVal << '\n' << *L << "\n\n\n");
1742
1743 // Replace the load!
1744 L->replaceAllUsesWith(AvailVal);
1745 if (isa<PointerType>(AvailVal->getType()))
1746 MD->invalidateCachedPointerInfo(AvailVal);
1747 toErase.push_back(L);
1748 NumGVNLoad++;
1749 return true;
1750 }
1751
Edwin Török47cf8842009-05-29 09:46:03 +00001752 DEBUG(
1753 // fast print dep, using operator<< on instruction would be too slow
Dan Gohman0be10b02009-07-25 01:43:01 +00001754 errs() << "GVN: load ";
1755 WriteAsOperand(errs(), L);
Chris Lattnerff36c952009-09-21 02:42:51 +00001756 Instruction *I = Dep.getInst();
Dan Gohman7e124382009-07-31 20:24:18 +00001757 errs() << " is clobbered by " << *I << '\n';
Edwin Török47cf8842009-05-29 09:46:03 +00001758 );
Chris Lattner4531da82008-12-05 21:04:20 +00001759 return false;
Edwin Török47cf8842009-05-29 09:46:03 +00001760 }
Chris Lattner4531da82008-12-05 21:04:20 +00001761
1762 // If it is defined in another block, try harder.
Chris Lattnerff36c952009-09-21 02:42:51 +00001763 if (Dep.isNonLocal())
Chris Lattner4531da82008-12-05 21:04:20 +00001764 return processNonLocalLoad(L, toErase);
Eli Friedman350307f2008-02-12 12:08:14 +00001765
Chris Lattnerff36c952009-09-21 02:42:51 +00001766 Instruction *DepInst = Dep.getInst();
Chris Lattner4531da82008-12-05 21:04:20 +00001767 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInst)) {
Chris Lattner7741aa52009-09-20 19:03:47 +00001768 Value *StoredVal = DepSI->getOperand(0);
1769
1770 // The store and load are to a must-aliased pointer, but they may not
1771 // actually have the same type. See if we know how to reuse the stored
1772 // value (depending on its type).
1773 const TargetData *TD = 0;
Chris Lattner10460aa2009-10-21 04:11:19 +00001774 if (StoredVal->getType() != L->getType()) {
1775 if ((TD = getAnalysisIfAvailable<TargetData>())) {
1776 StoredVal = CoerceAvailableValueToLoadType(StoredVal, L->getType(),
1777 L, *TD);
1778 if (StoredVal == 0)
1779 return false;
1780
1781 DEBUG(errs() << "GVN COERCED STORE:\n" << *DepSI << '\n' << *StoredVal
1782 << '\n' << *L << "\n\n\n");
1783 }
1784 else
Chris Lattner7741aa52009-09-20 19:03:47 +00001785 return false;
Chris Lattner7741aa52009-09-20 19:03:47 +00001786 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001787
Chris Lattner4531da82008-12-05 21:04:20 +00001788 // Remove it!
Chris Lattner7741aa52009-09-20 19:03:47 +00001789 L->replaceAllUsesWith(StoredVal);
1790 if (isa<PointerType>(StoredVal->getType()))
1791 MD->invalidateCachedPointerInfo(StoredVal);
Chris Lattner4531da82008-12-05 21:04:20 +00001792 toErase.push_back(L);
1793 NumGVNLoad++;
1794 return true;
1795 }
1796
1797 if (LoadInst *DepLI = dyn_cast<LoadInst>(DepInst)) {
Chris Lattner7741aa52009-09-20 19:03:47 +00001798 Value *AvailableVal = DepLI;
1799
1800 // The loads are of a must-aliased pointer, but they may not actually have
1801 // the same type. See if we know how to reuse the previously loaded value
1802 // (depending on its type).
1803 const TargetData *TD = 0;
Chris Lattner10460aa2009-10-21 04:11:19 +00001804 if (DepLI->getType() != L->getType()) {
1805 if ((TD = getAnalysisIfAvailable<TargetData>())) {
1806 AvailableVal = CoerceAvailableValueToLoadType(DepLI, L->getType(), L,*TD);
1807 if (AvailableVal == 0)
1808 return false;
Chris Lattner7741aa52009-09-20 19:03:47 +00001809
Chris Lattner10460aa2009-10-21 04:11:19 +00001810 DEBUG(errs() << "GVN COERCED LOAD:\n" << *DepLI << "\n" << *AvailableVal
1811 << "\n" << *L << "\n\n\n");
1812 }
1813 else
1814 return false;
Chris Lattner7741aa52009-09-20 19:03:47 +00001815 }
1816
Chris Lattner4531da82008-12-05 21:04:20 +00001817 // Remove it!
Chris Lattner7741aa52009-09-20 19:03:47 +00001818 L->replaceAllUsesWith(AvailableVal);
Chris Lattnerf81b0142008-12-09 22:06:23 +00001819 if (isa<PointerType>(DepLI->getType()))
1820 MD->invalidateCachedPointerInfo(DepLI);
Chris Lattner4531da82008-12-05 21:04:20 +00001821 toErase.push_back(L);
1822 NumGVNLoad++;
1823 return true;
1824 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001825
Chris Lattner8ea60462008-11-30 01:39:32 +00001826 // If this load really doesn't depend on anything, then we must be loading an
1827 // undef value. This can happen when loading from a fresh allocation with no
1828 // intervening stores, for example.
Victor Hernandezb1687302009-10-23 21:09:37 +00001829 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst)) {
Owen Andersonb99ecca2009-07-30 23:03:37 +00001830 L->replaceAllUsesWith(UndefValue::get(L->getType()));
Chris Lattner8ea60462008-11-30 01:39:32 +00001831 toErase.push_back(L);
Chris Lattner8ea60462008-11-30 01:39:32 +00001832 NumGVNLoad++;
Chris Lattner4531da82008-12-05 21:04:20 +00001833 return true;
Eli Friedman350307f2008-02-12 12:08:14 +00001834 }
Owen Andersonc07861a2009-10-28 07:05:35 +00001835
Owen Andersonf187daf2009-12-02 07:35:19 +00001836 // If this load occurs right after a lifetime begin,
Owen Andersonc07861a2009-10-28 07:05:35 +00001837 // then the loaded value is undefined.
1838 if (IntrinsicInst* II = dyn_cast<IntrinsicInst>(DepInst)) {
Owen Andersonf187daf2009-12-02 07:35:19 +00001839 if (II->getIntrinsicID() == Intrinsic::lifetime_start) {
Owen Andersonc07861a2009-10-28 07:05:35 +00001840 L->replaceAllUsesWith(UndefValue::get(L->getType()));
1841 toErase.push_back(L);
1842 NumGVNLoad++;
1843 return true;
1844 }
1845 }
Eli Friedman350307f2008-02-12 12:08:14 +00001846
Chris Lattner4531da82008-12-05 21:04:20 +00001847 return false;
Owen Anderson85c40642007-07-24 17:55:58 +00001848}
1849
Chris Lattnerff36c952009-09-21 02:42:51 +00001850Value *GVN::lookupNumber(BasicBlock *BB, uint32_t num) {
Owen Andersonaef6a922008-06-23 17:49:45 +00001851 DenseMap<BasicBlock*, ValueNumberScope*>::iterator I = localAvail.find(BB);
1852 if (I == localAvail.end())
1853 return 0;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001854
Chris Lattnerff36c952009-09-21 02:42:51 +00001855 ValueNumberScope *Locals = I->second;
1856 while (Locals) {
1857 DenseMap<uint32_t, Value*>::iterator I = Locals->table.find(num);
1858 if (I != Locals->table.end())
Owen Anderson2a412722008-06-20 01:15:47 +00001859 return I->second;
Chris Lattnerff36c952009-09-21 02:42:51 +00001860 Locals = Locals->parent;
Owen Anderson2a412722008-06-20 01:15:47 +00001861 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001862
Owen Anderson2a412722008-06-20 01:15:47 +00001863 return 0;
1864}
1865
Owen Andersona03e7862008-12-15 02:03:00 +00001866
Owen Andersonf631bb62007-08-14 18:16:29 +00001867/// processInstruction - When calculating availability, handle an instruction
Owen Anderson85c40642007-07-24 17:55:58 +00001868/// by inserting it into the appropriate sets
Owen Anderson9334fc62008-06-12 19:25:32 +00001869bool GVN::processInstruction(Instruction *I,
Chris Lattner7de20452008-03-21 22:01:16 +00001870 SmallVectorImpl<Instruction*> &toErase) {
Chris Lattnerff36c952009-09-21 02:42:51 +00001871 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
1872 bool Changed = processLoad(LI, toErase);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001873
Chris Lattnerff36c952009-09-21 02:42:51 +00001874 if (!Changed) {
1875 unsigned Num = VN.lookup_or_add(LI);
1876 localAvail[I->getParent()]->table.insert(std::make_pair(Num, LI));
Owen Andersone6b4ff82008-06-18 21:41:49 +00001877 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001878
Chris Lattnerff36c952009-09-21 02:42:51 +00001879 return Changed;
Owen Andersone6b4ff82008-06-18 21:41:49 +00001880 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001881
Chris Lattnerff36c952009-09-21 02:42:51 +00001882 uint32_t NextNum = VN.getNextUnusedValueNumber();
1883 unsigned Num = VN.lookup_or_add(I);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001884
Chris Lattnerff36c952009-09-21 02:42:51 +00001885 if (BranchInst *BI = dyn_cast<BranchInst>(I)) {
1886 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001887
Owen Andersonef8bf0f2009-04-01 23:53:49 +00001888 if (!BI->isConditional() || isa<Constant>(BI->getCondition()))
1889 return false;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001890
Chris Lattnerff36c952009-09-21 02:42:51 +00001891 Value *BranchCond = BI->getCondition();
1892 uint32_t CondVN = VN.lookup_or_add(BranchCond);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001893
Chris Lattnerff36c952009-09-21 02:42:51 +00001894 BasicBlock *TrueSucc = BI->getSuccessor(0);
1895 BasicBlock *FalseSucc = BI->getSuccessor(1);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001896
Chris Lattnerff36c952009-09-21 02:42:51 +00001897 if (TrueSucc->getSinglePredecessor())
1898 localAvail[TrueSucc]->table[CondVN] =
1899 ConstantInt::getTrue(TrueSucc->getContext());
1900 if (FalseSucc->getSinglePredecessor())
1901 localAvail[FalseSucc]->table[CondVN] =
1902 ConstantInt::getFalse(TrueSucc->getContext());
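    // (e.g. given "br i1 %cmp, label %T, label %F", %cmp is recorded as true
    //  in %T and as false in %F, provided each successor has a single
    //  predecessor.)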
Owen Andersonef8bf0f2009-04-01 23:53:49 +00001903
1904 return false;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001905
Owen Andersonced50f82008-04-07 09:59:07 +00001906 // Allocations are always uniquely numbered, so we can save time and memory
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001907 // by fast failing them.
Victor Hernandezb1687302009-10-23 21:09:37 +00001908 } else if (isa<AllocaInst>(I) || isa<TerminatorInst>(I)) {
Chris Lattnerff36c952009-09-21 02:42:51 +00001909 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Andersonced50f82008-04-07 09:59:07 +00001910 return false;
Owen Andersone6b4ff82008-06-18 21:41:49 +00001911 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001912
Owen Andersone0143452007-08-16 22:02:55 +00001913 // Collapse PHI nodes
Owen Anderson98f6a6b2007-08-14 18:33:27 +00001914 if (PHINode* p = dyn_cast<PHINode>(I)) {
Chris Lattnerff36c952009-09-21 02:42:51 +00001915 Value *constVal = CollapsePhi(p);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001916
Owen Anderson98f6a6b2007-08-14 18:33:27 +00001917 if (constVal) {
Owen Andersone02ad522007-08-16 22:51:56 +00001918 p->replaceAllUsesWith(constVal);
Dan Gohmanc8d26652009-11-14 02:27:51 +00001919 if (MD && isa<PointerType>(constVal->getType()))
Chris Lattnerf81b0142008-12-09 22:06:23 +00001920 MD->invalidateCachedPointerInfo(constVal);
Owen Anderson575f2812008-12-23 00:49:51 +00001921 VN.erase(p);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001922
Owen Andersone02ad522007-08-16 22:51:56 +00001923 toErase.push_back(p);
Owen Andersone6b4ff82008-06-18 21:41:49 +00001924 } else {
Chris Lattnerff36c952009-09-21 02:42:51 +00001925 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Anderson98f6a6b2007-08-14 18:33:27 +00001926 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001927
Owen Anderson8a8d13c2008-07-03 17:44:33 +00001928 // If the number we were assigned was a brand new VN, then we don't
1929 // need to do a lookup to see if the number already exists
1930 // somewhere in the domtree: it can't!
Chris Lattnerff36c952009-09-21 02:42:51 +00001931 } else if (Num == NextNum) {
1932 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001933
Owen Andersona03e7862008-12-15 02:03:00 +00001934 // Perform fast-path value-number based elimination of values inherited from
1935 // dominators.
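  // (e.g. if a dominating block already holds an instruction with this value
  //  number, the current instruction is replaced by that earlier value.)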
Chris Lattnerff36c952009-09-21 02:42:51 +00001936 } else if (Value *repl = lookupNumber(I->getParent(), Num)) {
Owen Andersonc772be72007-12-08 01:37:09 +00001937 // Remove it!
Owen Anderson5aff8002007-07-31 23:27:13 +00001938 VN.erase(I);
Owen Anderson85c40642007-07-24 17:55:58 +00001939 I->replaceAllUsesWith(repl);
Dan Gohmanc8d26652009-11-14 02:27:51 +00001940 if (MD && isa<PointerType>(repl->getType()))
Chris Lattnerf81b0142008-12-09 22:06:23 +00001941 MD->invalidateCachedPointerInfo(repl);
Owen Anderson85c40642007-07-24 17:55:58 +00001942 toErase.push_back(I);
1943 return true;
Owen Andersona03e7862008-12-15 02:03:00 +00001944
Owen Anderson8a8d13c2008-07-03 17:44:33 +00001945 } else {
Chris Lattnerff36c952009-09-21 02:42:51 +00001946 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Anderson85c40642007-07-24 17:55:58 +00001947 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001948
Owen Anderson85c40642007-07-24 17:55:58 +00001949 return false;
1950}
1951
Bill Wendling42f17f62008-12-22 22:32:22 +00001952/// runOnFunction - This is the main transformation entry point for a function.
Owen Andersonbe168b32007-08-14 18:04:11 +00001953bool GVN::runOnFunction(Function& F) {
Dan Gohmanc8d26652009-11-14 02:27:51 +00001954 if (!NoLoads)
1955 MD = &getAnalysis<MemoryDependenceAnalysis>();
Chris Lattner02ca4422008-12-01 00:40:32 +00001956 DT = &getAnalysis<DominatorTree>();
Owen Andersonbcf2bd52008-05-12 20:15:55 +00001957 VN.setAliasAnalysis(&getAnalysis<AliasAnalysis>());
Chris Lattner02ca4422008-12-01 00:40:32 +00001958 VN.setMemDep(MD);
1959 VN.setDomTree(DT);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001960
Chris Lattnerff36c952009-09-21 02:42:51 +00001961 bool Changed = false;
1962 bool ShouldContinue = true;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001963
Owen Anderson26ed2572008-07-16 17:52:31 +00001964 // Merge unconditional branches, allowing PRE to catch more
1965 // optimization opportunities.
1966 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ) {
Chris Lattnerff36c952009-09-21 02:42:51 +00001967 BasicBlock *BB = FI;
Owen Anderson26ed2572008-07-16 17:52:31 +00001968 ++FI;
Owen Andersonf59eef82008-07-17 00:01:40 +00001969 bool removedBlock = MergeBlockIntoPredecessor(BB, this);
1970 if (removedBlock) NumGVNBlocks++;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001971
Chris Lattnerff36c952009-09-21 02:42:51 +00001972 Changed |= removedBlock;
Owen Anderson26ed2572008-07-16 17:52:31 +00001973 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001974
Chris Lattner4bab29b2008-12-09 19:21:47 +00001975 unsigned Iteration = 0;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001976
Chris Lattnerff36c952009-09-21 02:42:51 +00001977 while (ShouldContinue) {
Dan Gohman0be10b02009-07-25 01:43:01 +00001978 DEBUG(errs() << "GVN iteration: " << Iteration << "\n");
Chris Lattnerff36c952009-09-21 02:42:51 +00001979 ShouldContinue = iterateOnFunction(F);
1980 Changed |= ShouldContinue;
Chris Lattner4bab29b2008-12-09 19:21:47 +00001981 ++Iteration;
Owen Andersonbe168b32007-08-14 18:04:11 +00001982 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00001983
Owen Anderson916f4732008-07-18 18:03:38 +00001984 if (EnablePRE) {
Owen Anderson9c935902008-09-03 23:06:07 +00001985 bool PREChanged = true;
1986 while (PREChanged) {
1987 PREChanged = performPRE(F);
Chris Lattnerff36c952009-09-21 02:42:51 +00001988 Changed |= PREChanged;
Owen Anderson9c935902008-09-03 23:06:07 +00001989 }
Owen Anderson916f4732008-07-18 18:03:38 +00001990 }
Chris Lattner4bab29b2008-12-09 19:21:47 +00001991 // FIXME: Should perform GVN again after PRE does something. PRE can move
1992 // computations into blocks where they become fully redundant. Note that
1993 // we can't do this until PRE's critical edge splitting updates memdep.
1994 // Actually, when this happens, we should just fully integrate PRE into GVN.
Nuno Lopes274474b2008-10-10 16:25:50 +00001995
1996 cleanupGlobalSets();
1997
Chris Lattnerff36c952009-09-21 02:42:51 +00001998 return Changed;
Owen Andersonbe168b32007-08-14 18:04:11 +00001999}
2000
2001
Chris Lattnerff36c952009-09-21 02:42:51 +00002002bool GVN::processBlock(BasicBlock *BB) {
Chris Lattner4bab29b2008-12-09 19:21:47 +00002003 // FIXME: Kill off toErase by doing erasing eagerly in a helper function (and
2004 // incrementing BI before processing an instruction).
Owen Anderson9334fc62008-06-12 19:25:32 +00002005 SmallVector<Instruction*, 8> toErase;
Chris Lattnerff36c952009-09-21 02:42:51 +00002006 bool ChangedFunction = false;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002007
Owen Anderson9334fc62008-06-12 19:25:32 +00002008 for (BasicBlock::iterator BI = BB->begin(), BE = BB->end();
2009 BI != BE;) {
Chris Lattnerff36c952009-09-21 02:42:51 +00002010 ChangedFunction |= processInstruction(BI, toErase);
Owen Anderson9334fc62008-06-12 19:25:32 +00002011 if (toErase.empty()) {
2012 ++BI;
2013 continue;
2014 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002015
Owen Anderson9334fc62008-06-12 19:25:32 +00002016 // If we need some instructions deleted, do it now.
2017 NumGVNInstr += toErase.size();
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002018
Owen Anderson9334fc62008-06-12 19:25:32 +00002019 // Avoid iterator invalidation.
2020 bool AtStart = BI == BB->begin();
2021 if (!AtStart)
2022 --BI;
2023
2024 for (SmallVector<Instruction*, 4>::iterator I = toErase.begin(),
Chris Lattner02ca4422008-12-01 00:40:32 +00002025 E = toErase.end(); I != E; ++I) {
Dan Gohman7e124382009-07-31 20:24:18 +00002026 DEBUG(errs() << "GVN removed: " << **I << '\n');
Dan Gohmanc8d26652009-11-14 02:27:51 +00002027 if (MD) MD->removeInstruction(*I);
Owen Anderson9334fc62008-06-12 19:25:32 +00002028 (*I)->eraseFromParent();
Bill Wendling84049422008-12-22 21:57:30 +00002029 DEBUG(verifyRemoved(*I));
Chris Lattner02ca4422008-12-01 00:40:32 +00002030 }
Chris Lattner4bab29b2008-12-09 19:21:47 +00002031 toErase.clear();
Owen Anderson9334fc62008-06-12 19:25:32 +00002032
2033 if (AtStart)
2034 BI = BB->begin();
2035 else
2036 ++BI;
Owen Anderson9334fc62008-06-12 19:25:32 +00002037 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002038
Chris Lattnerff36c952009-09-21 02:42:51 +00002039 return ChangedFunction;
Owen Anderson9334fc62008-06-12 19:25:32 +00002040}
2041
Owen Andersone6b4ff82008-06-18 21:41:49 +00002042/// performPRE - Perform a purely local form of PRE that looks for diamond
2043/// control flow patterns and attempts to perform simple PRE at the join point.
Chris Lattner4790cb42009-10-31 22:11:15 +00002044bool GVN::performPRE(Function &F) {
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002045 bool Changed = false;
Owen Andersonec747c42008-06-19 19:54:19 +00002046 SmallVector<std::pair<TerminatorInst*, unsigned>, 4> toSplit;
Chris Lattner3304b562008-12-01 07:29:03 +00002047 DenseMap<BasicBlock*, Value*> predMap;
Owen Andersone6b4ff82008-06-18 21:41:49 +00002048 for (df_iterator<BasicBlock*> DI = df_begin(&F.getEntryBlock()),
2049 DE = df_end(&F.getEntryBlock()); DI != DE; ++DI) {
Chris Lattnerff36c952009-09-21 02:42:51 +00002050 BasicBlock *CurrentBlock = *DI;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002051
Owen Andersone6b4ff82008-06-18 21:41:49 +00002052 // Nothing to PRE in the entry block.
2053 if (CurrentBlock == &F.getEntryBlock()) continue;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002054
Owen Andersone6b4ff82008-06-18 21:41:49 +00002055 for (BasicBlock::iterator BI = CurrentBlock->begin(),
2056 BE = CurrentBlock->end(); BI != BE; ) {
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002057 Instruction *CurInst = BI++;
Duncan Sands2f500832009-05-06 06:49:50 +00002058
Victor Hernandezb1687302009-10-23 21:09:37 +00002059 if (isa<AllocaInst>(CurInst) ||
Victor Hernandez48c3c542009-09-18 22:35:49 +00002060 isa<TerminatorInst>(CurInst) || isa<PHINode>(CurInst) ||
Devang Patele9d08b82009-10-14 17:29:00 +00002061 CurInst->getType()->isVoidTy() ||
Duncan Sands2f500832009-05-06 06:49:50 +00002062 CurInst->mayReadFromMemory() || CurInst->mayHaveSideEffects() ||
John Criswell6e0aa282009-03-10 15:04:53 +00002063 isa<DbgInfoIntrinsic>(CurInst))
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002064 continue;
Duncan Sands2f500832009-05-06 06:49:50 +00002065
Chris Lattnerff36c952009-09-21 02:42:51 +00002066 uint32_t ValNo = VN.lookup(CurInst);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002067
Owen Andersone6b4ff82008-06-18 21:41:49 +00002068 // Look for the predecessors for PRE opportunities. We're
2069 // only trying to solve the basic diamond case, where
2070 // a value is computed in the successor and one predecessor,
2071 // but not the other. We also explicitly disallow cases
2072 // where the successor is its own predecessor, because they're
2073 // more complicated to get right.
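      // (The profitable shape is thus: CurrentBlock has two predecessors, the
      //  value is already available in one of them, and only the other one
      //  (PREPred) would need a copy of the computation inserted.)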
Chris Lattnerff36c952009-09-21 02:42:51 +00002074 unsigned NumWith = 0;
2075 unsigned NumWithout = 0;
2076 BasicBlock *PREPred = 0;
Chris Lattner3304b562008-12-01 07:29:03 +00002077 predMap.clear();
2078
Owen Andersone6b4ff82008-06-18 21:41:49 +00002079 for (pred_iterator PI = pred_begin(CurrentBlock),
2080 PE = pred_end(CurrentBlock); PI != PE; ++PI) {
2081 // We're not interested in PRE where the block is its
Owen Anderson2a412722008-06-20 01:15:47 +00002082 // own predecessor, on in blocks with predecessors
2083 // that are not reachable.
2084 if (*PI == CurrentBlock) {
Chris Lattnerff36c952009-09-21 02:42:51 +00002085 NumWithout = 2;
Owen Anderson2a412722008-06-20 01:15:47 +00002086 break;
2087 } else if (!localAvail.count(*PI)) {
Chris Lattnerff36c952009-09-21 02:42:51 +00002088 NumWithout = 2;
Owen Anderson2a412722008-06-20 01:15:47 +00002089 break;
2090 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002091
2092 DenseMap<uint32_t, Value*>::iterator predV =
Chris Lattnerff36c952009-09-21 02:42:51 +00002093 localAvail[*PI]->table.find(ValNo);
Owen Anderson2a412722008-06-20 01:15:47 +00002094 if (predV == localAvail[*PI]->table.end()) {
Owen Andersone6b4ff82008-06-18 21:41:49 +00002095 PREPred = *PI;
Chris Lattnerff36c952009-09-21 02:42:51 +00002096 NumWithout++;
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002097 } else if (predV->second == CurInst) {
Chris Lattnerff36c952009-09-21 02:42:51 +00002098 NumWithout = 2;
Owen Andersone6b4ff82008-06-18 21:41:49 +00002099 } else {
Owen Anderson2a412722008-06-20 01:15:47 +00002100 predMap[*PI] = predV->second;
Chris Lattnerff36c952009-09-21 02:42:51 +00002101 NumWith++;
Owen Andersone6b4ff82008-06-18 21:41:49 +00002102 }
2103 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002104
Owen Andersone6b4ff82008-06-18 21:41:49 +00002105 // Don't do PRE when it might increase code size, i.e. when
2106 // we would need to insert instructions in more than one pred.
Chris Lattnerff36c952009-09-21 02:42:51 +00002107 if (NumWithout != 1 || NumWith == 0)
Owen Andersone6b4ff82008-06-18 21:41:49 +00002108 continue;
Chris Lattner4790cb42009-10-31 22:11:15 +00002109
2110 // Don't do PRE across indirect branch.
2111 if (isa<IndirectBrInst>(PREPred->getTerminator()))
2112 continue;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002113
Owen Andersonec747c42008-06-19 19:54:19 +00002114 // We can't do PRE safely on a critical edge, so instead we schedule
2115 // the edge to be split and perform the PRE the next time we iterate
2116 // on the function.
Chris Lattnerff36c952009-09-21 02:42:51 +00002117 unsigned SuccNum = 0;
Owen Andersonec747c42008-06-19 19:54:19 +00002118 for (unsigned i = 0, e = PREPred->getTerminator()->getNumSuccessors();
2119 i != e; ++i)
Owen Anderson9c935902008-09-03 23:06:07 +00002120 if (PREPred->getTerminator()->getSuccessor(i) == CurrentBlock) {
Chris Lattnerff36c952009-09-21 02:42:51 +00002121 SuccNum = i;
Owen Andersonec747c42008-06-19 19:54:19 +00002122 break;
2123 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002124
Chris Lattnerff36c952009-09-21 02:42:51 +00002125 if (isCriticalEdge(PREPred->getTerminator(), SuccNum)) {
2126 toSplit.push_back(std::make_pair(PREPred->getTerminator(), SuccNum));
Owen Andersonec747c42008-06-19 19:54:19 +00002127 continue;
2128 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002129
Owen Andersone6b4ff82008-06-18 21:41:49 +00002130 // Instantiate the expression in the predecessor that lacked it.
2131 // Because we are going top-down through the block, all value numbers
2132 // will be available in the predecessor by the time we need them. Any
2133 // that weren't originally present will have been instantiated earlier
2134 // in this loop.
Nick Lewyckyc94270c2009-09-27 07:38:41 +00002135 Instruction *PREInstr = CurInst->clone();
Owen Andersone6b4ff82008-06-18 21:41:49 +00002136 bool success = true;
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002137 for (unsigned i = 0, e = CurInst->getNumOperands(); i != e; ++i) {
2138 Value *Op = PREInstr->getOperand(i);
2139 if (isa<Argument>(Op) || isa<Constant>(Op) || isa<GlobalValue>(Op))
2140 continue;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002141
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002142 if (Value *V = lookupNumber(PREPred, VN.lookup(Op))) {
2143 PREInstr->setOperand(i, V);
2144 } else {
2145 success = false;
2146 break;
Owen Anderson14c612f2008-07-11 20:05:13 +00002147 }
Owen Andersone6b4ff82008-06-18 21:41:49 +00002148 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002149
Owen Andersone6b4ff82008-06-18 21:41:49 +00002150 // Fail out if we encounter an operand that is not available in
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002151 // the PRE predecessor. This is typically because of loads which
Owen Andersone6b4ff82008-06-18 21:41:49 +00002152 // are not value numbered precisely.
2153 if (!success) {
2154 delete PREInstr;
Bill Wendling3858cae2008-12-22 22:14:07 +00002155 DEBUG(verifyRemoved(PREInstr));
Owen Andersone6b4ff82008-06-18 21:41:49 +00002156 continue;
2157 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002158
Owen Andersone6b4ff82008-06-18 21:41:49 +00002159 PREInstr->insertBefore(PREPred->getTerminator());
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002160 PREInstr->setName(CurInst->getName() + ".pre");
Owen Anderson2a412722008-06-20 01:15:47 +00002161 predMap[PREPred] = PREInstr;
Chris Lattnerff36c952009-09-21 02:42:51 +00002162 VN.add(PREInstr, ValNo);
Owen Andersone6b4ff82008-06-18 21:41:49 +00002163 NumGVNPRE++;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002164
Owen Andersone6b4ff82008-06-18 21:41:49 +00002165 // Update the availability map to include the new instruction.
Chris Lattnerff36c952009-09-21 02:42:51 +00002166 localAvail[PREPred]->table.insert(std::make_pair(ValNo, PREInstr));
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002167
Owen Andersone6b4ff82008-06-18 21:41:49 +00002168 // Create a PHI to make the value available in this block.
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002169 PHINode* Phi = PHINode::Create(CurInst->getType(),
2170 CurInst->getName() + ".pre-phi",
Owen Andersone6b4ff82008-06-18 21:41:49 +00002171 CurrentBlock->begin());
2172 for (pred_iterator PI = pred_begin(CurrentBlock),
2173 PE = pred_end(CurrentBlock); PI != PE; ++PI)
Owen Anderson2a412722008-06-20 01:15:47 +00002174 Phi->addIncoming(predMap[*PI], *PI);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002175
Chris Lattnerff36c952009-09-21 02:42:51 +00002176 VN.add(Phi, ValNo);
2177 localAvail[CurrentBlock]->table[ValNo] = Phi;
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002178
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002179 CurInst->replaceAllUsesWith(Phi);
Dan Gohmanc8d26652009-11-14 02:27:51 +00002180 if (MD && isa<PointerType>(Phi->getType()))
Chris Lattnerf81b0142008-12-09 22:06:23 +00002181 MD->invalidateCachedPointerInfo(Phi);
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002182 VN.erase(CurInst);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002183
Dan Gohman7e124382009-07-31 20:24:18 +00002184 DEBUG(errs() << "GVN PRE removed: " << *CurInst << '\n');
Dan Gohmanc8d26652009-11-14 02:27:51 +00002185 if (MD) MD->removeInstruction(CurInst);
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002186 CurInst->eraseFromParent();
Bill Wendling84049422008-12-22 21:57:30 +00002187 DEBUG(verifyRemoved(CurInst));
Chris Lattner66a3a3e2008-12-01 07:35:54 +00002188 Changed = true;
Owen Andersone6b4ff82008-06-18 21:41:49 +00002189 }
2190 }
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002191
Owen Andersonec747c42008-06-19 19:54:19 +00002192 for (SmallVector<std::pair<TerminatorInst*, unsigned>, 4>::iterator
Anton Korobeynikov2e8710c2008-12-05 19:38:49 +00002193 I = toSplit.begin(), E = toSplit.end(); I != E; ++I)
Owen Andersonec747c42008-06-19 19:54:19 +00002194 SplitCriticalEdge(I->first, I->second, this);
Daniel Dunbar3be44e62009-09-20 02:20:51 +00002195
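  // Splitting a critical edge counts as a change even if no PRE was performed,
  // so that PRE is attempted again once the new blocks exist.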
Anton Korobeynikov2e8710c2008-12-05 19:38:49 +00002196 return Changed || toSplit.size();
Owen Andersone6b4ff82008-06-18 21:41:49 +00002197}
2198
Bill Wendling42f17f62008-12-22 22:32:22 +00002199/// iterateOnFunction - Executes one iteration of GVN
Owen Andersonbe168b32007-08-14 18:04:11 +00002200bool GVN::iterateOnFunction(Function &F) {
Nuno Lopes274474b2008-10-10 16:25:50 +00002201 cleanupGlobalSets();
Chris Lattner98054902008-03-21 21:33:23 +00002202
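  // Walk the dominator tree, giving each block a value-number scope that
  // chains to the scope of its immediate dominator.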
Owen Andersonef8bf0f2009-04-01 23:53:49 +00002203 for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
2204 DE = df_end(DT->getRootNode()); DI != DE; ++DI) {
2205 if (DI->getIDom())
2206 localAvail[DI->getBlock()] =
2207 new ValueNumberScope(localAvail[DI->getIDom()->getBlock()]);
2208 else
2209 localAvail[DI->getBlock()] = new ValueNumberScope(0);
2210 }
2211
Owen Anderson85c40642007-07-24 17:55:58 +00002212 // Top-down walk of the dominator tree
Chris Lattnerff36c952009-09-21 02:42:51 +00002213 bool Changed = false;
Owen Andersonef136f52008-12-15 03:52:17 +00002214#if 0
2215 // Needed for value numbering with phi construction to work.
Owen Andersona03e7862008-12-15 02:03:00 +00002216 ReversePostOrderTraversal<Function*> RPOT(&F);
2217 for (ReversePostOrderTraversal<Function*>::rpo_iterator RI = RPOT.begin(),
2218 RE = RPOT.end(); RI != RE; ++RI)
Chris Lattnerff36c952009-09-21 02:42:51 +00002219 Changed |= processBlock(*RI);
Owen Andersonef136f52008-12-15 03:52:17 +00002220#else
2221 for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
2222 DE = df_end(DT->getRootNode()); DI != DE; ++DI)
Chris Lattnerff36c952009-09-21 02:42:51 +00002223 Changed |= processBlock(DI->getBlock());
Owen Andersonef136f52008-12-15 03:52:17 +00002224#endif
2225
Chris Lattnerff36c952009-09-21 02:42:51 +00002226 return Changed;
Owen Anderson85c40642007-07-24 17:55:58 +00002227}
Nuno Lopes274474b2008-10-10 16:25:50 +00002228
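/// cleanupGlobalSets - Clear the value table and free the per-block value
/// numbering scopes.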
2229void GVN::cleanupGlobalSets() {
2230 VN.clear();
Nuno Lopes274474b2008-10-10 16:25:50 +00002231
2232 for (DenseMap<BasicBlock*, ValueNumberScope*>::iterator
2233 I = localAvail.begin(), E = localAvail.end(); I != E; ++I)
2234 delete I->second;
2235 localAvail.clear();
2236}
Bill Wendling2a023742008-12-22 21:36:08 +00002237
2238/// verifyRemoved - Verify that the specified instruction does not occur in our
2239/// internal data structures.
Bill Wendlingf9c0e9e2008-12-22 22:28:56 +00002240void GVN::verifyRemoved(const Instruction *Inst) const {
2241 VN.verifyRemoved(Inst);
Bill Wendling3858cae2008-12-22 22:14:07 +00002242
Bill Wendlingf9c0e9e2008-12-22 22:28:56 +00002243 // Walk through the value number scope to make sure the instruction isn't
2244 // ferreted away in it.
Jeffrey Yasskin8154d2e2009-11-10 01:02:17 +00002245 for (DenseMap<BasicBlock*, ValueNumberScope*>::const_iterator
Bill Wendlingf9c0e9e2008-12-22 22:28:56 +00002246 I = localAvail.begin(), E = localAvail.end(); I != E; ++I) {
2247 const ValueNumberScope *VNS = I->second;
2248
2249 while (VNS) {
Jeffrey Yasskin8154d2e2009-11-10 01:02:17 +00002250 for (DenseMap<uint32_t, Value*>::const_iterator
Bill Wendlingf9c0e9e2008-12-22 22:28:56 +00002251 II = VNS->table.begin(), IE = VNS->table.end(); II != IE; ++II) {
2252 assert(II->second != Inst && "Inst still in value numbering scope!");
2253 }
2254
2255 VNS = VNS->parent;
Bill Wendling3858cae2008-12-22 22:14:07 +00002256 }
2257 }
Bill Wendling2a023742008-12-22 21:36:08 +00002258}