//===- GVN.cpp - Eliminate redundant values and loads ---------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs global value numbering to eliminate fully redundant
// instructions. It also performs simple dead load elimination.
//
// Note that this pass does the value numbering itself; it does not use the
// ValueNumbering analysis passes.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "gvn"
#include "llvm/Transforms/Scalar.h"
#include "llvm/BasicBlock.h"
#include "llvm/Constants.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Function.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/LLVMContext.h"
#include "llvm/Operator.h"
#include "llvm/Value.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Analysis/PHITransAddr.h"
#include "llvm/Support/CFG.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/GetElementPtrTypeIterator.h"
#include "llvm/Support/IRBuilder.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/SSAUpdater.h"
#include <cstdio>
using namespace llvm;

STATISTIC(NumGVNInstr,  "Number of instructions deleted");
STATISTIC(NumGVNLoad,   "Number of loads deleted");
STATISTIC(NumGVNPRE,    "Number of instructions PRE'd");
STATISTIC(NumGVNBlocks, "Number of blocks merged");
STATISTIC(NumPRELoad,   "Number of loads PRE'd");

static cl::opt<bool> EnablePRE("enable-pre",
                               cl::init(true), cl::Hidden);
static cl::opt<bool> EnableLoadPRE("enable-load-pre", cl::init(true));

//===----------------------------------------------------------------------===//
//                         ValueTable Class
//===----------------------------------------------------------------------===//

/// This class holds the mapping between values and value numbers. It is used
/// as an efficient mechanism to determine the expression-wise equivalence of
/// two values.
namespace {
  struct Expression {
    enum ExpressionOpcode { ADD, FADD, SUB, FSUB, MUL, FMUL,
                            UDIV, SDIV, FDIV, UREM, SREM,
                            FREM, SHL, LSHR, ASHR, AND, OR, XOR, ICMPEQ,
                            ICMPNE, ICMPUGT, ICMPUGE, ICMPULT, ICMPULE,
                            ICMPSGT, ICMPSGE, ICMPSLT, ICMPSLE, FCMPOEQ,
                            FCMPOGT, FCMPOGE, FCMPOLT, FCMPOLE, FCMPONE,
                            FCMPORD, FCMPUNO, FCMPUEQ, FCMPUGT, FCMPUGE,
                            FCMPULT, FCMPULE, FCMPUNE, EXTRACT, INSERT,
                            SHUFFLE, SELECT, TRUNC, ZEXT, SEXT, FPTOUI,
                            FPTOSI, UITOFP, SITOFP, FPTRUNC, FPEXT,
                            PTRTOINT, INTTOPTR, BITCAST, GEP, CALL, CONSTANT,
                            INSERTVALUE, EXTRACTVALUE, EMPTY, TOMBSTONE };

    ExpressionOpcode opcode;
    const Type* type;
    SmallVector<uint32_t, 4> varargs;
    Value *function;

    Expression() { }
    Expression(ExpressionOpcode o) : opcode(o) { }

    bool operator==(const Expression &other) const {
      if (opcode != other.opcode)
        return false;
      else if (opcode == EMPTY || opcode == TOMBSTONE)
        return true;
      else if (type != other.type)
        return false;
      else if (function != other.function)
        return false;
      else {
        if (varargs.size() != other.varargs.size())
          return false;

        for (size_t i = 0; i < varargs.size(); ++i)
          if (varargs[i] != other.varargs[i])
            return false;

        return true;
      }
    }

    bool operator!=(const Expression &other) const {
      return !(*this == other);
    }
  };
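
  // For example, the two instructions
  //   %a = add i32 %x, %y
  //   %b = add i32 %x, %y
  // are turned into Expressions with the same opcode (ADD), the same type
  // (i32), and the same operand value numbers in varargs, so operator==
  // reports them equal and they end up sharing a single value number.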

  class ValueTable {
    private:
      DenseMap<Value*, uint32_t> valueNumbering;
      DenseMap<Expression, uint32_t> expressionNumbering;
      AliasAnalysis* AA;
      MemoryDependenceAnalysis* MD;
      DominatorTree* DT;

      uint32_t nextValueNumber;

      Expression::ExpressionOpcode getOpcode(BinaryOperator* BO);
      Expression::ExpressionOpcode getOpcode(CmpInst* C);
      Expression::ExpressionOpcode getOpcode(CastInst* C);
      Expression create_expression(BinaryOperator* BO);
      Expression create_expression(CmpInst* C);
      Expression create_expression(ShuffleVectorInst* V);
      Expression create_expression(ExtractElementInst* C);
      Expression create_expression(InsertElementInst* V);
      Expression create_expression(SelectInst* V);
      Expression create_expression(CastInst* C);
      Expression create_expression(GetElementPtrInst* G);
      Expression create_expression(CallInst* C);
      Expression create_expression(Constant* C);
      Expression create_expression(ExtractValueInst* C);
      Expression create_expression(InsertValueInst* C);

      uint32_t lookup_or_add_call(CallInst* C);
    public:
      ValueTable() : nextValueNumber(1) { }
      uint32_t lookup_or_add(Value *V);
      uint32_t lookup(Value *V) const;
      void add(Value *V, uint32_t num);
      void clear();
      void erase(Value *v);
      unsigned size();
      void setAliasAnalysis(AliasAnalysis* A) { AA = A; }
      AliasAnalysis *getAliasAnalysis() const { return AA; }
      void setMemDep(MemoryDependenceAnalysis* M) { MD = M; }
      void setDomTree(DominatorTree* D) { DT = D; }
      uint32_t getNextUnusedValueNumber() { return nextValueNumber; }
      void verifyRemoved(const Value *) const;
  };
}

namespace llvm {
template <> struct DenseMapInfo<Expression> {
  static inline Expression getEmptyKey() {
    return Expression(Expression::EMPTY);
  }

  static inline Expression getTombstoneKey() {
    return Expression(Expression::TOMBSTONE);
  }

  static unsigned getHashValue(const Expression e) {
    unsigned hash = e.opcode;

    hash = ((unsigned)((uintptr_t)e.type >> 4) ^
            (unsigned)((uintptr_t)e.type >> 9));

    for (SmallVector<uint32_t, 4>::const_iterator I = e.varargs.begin(),
         E = e.varargs.end(); I != E; ++I)
      hash = *I + hash * 37;

    hash = ((unsigned)((uintptr_t)e.function >> 4) ^
            (unsigned)((uintptr_t)e.function >> 9)) +
           hash * 37;

    return hash;
  }
  static bool isEqual(const Expression &LHS, const Expression &RHS) {
    return LHS == RHS;
  }
  static bool isPod() { return true; }
};
}

//===----------------------------------------------------------------------===//
//                     ValueTable Internal Functions
//===----------------------------------------------------------------------===//
Expression::ExpressionOpcode ValueTable::getOpcode(BinaryOperator* BO) {
  switch(BO->getOpcode()) {
  default: // THIS SHOULD NEVER HAPPEN
    llvm_unreachable("Binary operator with unknown opcode?");
  case Instruction::Add:  return Expression::ADD;
  case Instruction::FAdd: return Expression::FADD;
  case Instruction::Sub:  return Expression::SUB;
  case Instruction::FSub: return Expression::FSUB;
  case Instruction::Mul:  return Expression::MUL;
  case Instruction::FMul: return Expression::FMUL;
  case Instruction::UDiv: return Expression::UDIV;
  case Instruction::SDiv: return Expression::SDIV;
  case Instruction::FDiv: return Expression::FDIV;
  case Instruction::URem: return Expression::UREM;
  case Instruction::SRem: return Expression::SREM;
  case Instruction::FRem: return Expression::FREM;
  case Instruction::Shl:  return Expression::SHL;
  case Instruction::LShr: return Expression::LSHR;
  case Instruction::AShr: return Expression::ASHR;
  case Instruction::And:  return Expression::AND;
  case Instruction::Or:   return Expression::OR;
  case Instruction::Xor:  return Expression::XOR;
  }
}

Expression::ExpressionOpcode ValueTable::getOpcode(CmpInst* C) {
  if (isa<ICmpInst>(C)) {
    switch (C->getPredicate()) {
    default:  // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case ICmpInst::ICMP_EQ:  return Expression::ICMPEQ;
    case ICmpInst::ICMP_NE:  return Expression::ICMPNE;
    case ICmpInst::ICMP_UGT: return Expression::ICMPUGT;
    case ICmpInst::ICMP_UGE: return Expression::ICMPUGE;
    case ICmpInst::ICMP_ULT: return Expression::ICMPULT;
    case ICmpInst::ICMP_ULE: return Expression::ICMPULE;
    case ICmpInst::ICMP_SGT: return Expression::ICMPSGT;
    case ICmpInst::ICMP_SGE: return Expression::ICMPSGE;
    case ICmpInst::ICMP_SLT: return Expression::ICMPSLT;
    case ICmpInst::ICMP_SLE: return Expression::ICMPSLE;
    }
  } else {
    switch (C->getPredicate()) {
    default: // THIS SHOULD NEVER HAPPEN
      llvm_unreachable("Comparison with unknown predicate?");
    case FCmpInst::FCMP_OEQ: return Expression::FCMPOEQ;
    case FCmpInst::FCMP_OGT: return Expression::FCMPOGT;
    case FCmpInst::FCMP_OGE: return Expression::FCMPOGE;
    case FCmpInst::FCMP_OLT: return Expression::FCMPOLT;
    case FCmpInst::FCMP_OLE: return Expression::FCMPOLE;
    case FCmpInst::FCMP_ONE: return Expression::FCMPONE;
    case FCmpInst::FCMP_ORD: return Expression::FCMPORD;
    case FCmpInst::FCMP_UNO: return Expression::FCMPUNO;
    case FCmpInst::FCMP_UEQ: return Expression::FCMPUEQ;
    case FCmpInst::FCMP_UGT: return Expression::FCMPUGT;
    case FCmpInst::FCMP_UGE: return Expression::FCMPUGE;
    case FCmpInst::FCMP_ULT: return Expression::FCMPULT;
    case FCmpInst::FCMP_ULE: return Expression::FCMPULE;
    case FCmpInst::FCMP_UNE: return Expression::FCMPUNE;
    }
  }
}

Expression::ExpressionOpcode ValueTable::getOpcode(CastInst* C) {
  switch(C->getOpcode()) {
  default: // THIS SHOULD NEVER HAPPEN
    llvm_unreachable("Cast operator with unknown opcode?");
  case Instruction::Trunc:    return Expression::TRUNC;
  case Instruction::ZExt:     return Expression::ZEXT;
  case Instruction::SExt:     return Expression::SEXT;
  case Instruction::FPToUI:   return Expression::FPTOUI;
  case Instruction::FPToSI:   return Expression::FPTOSI;
  case Instruction::UIToFP:   return Expression::UITOFP;
  case Instruction::SIToFP:   return Expression::SITOFP;
  case Instruction::FPTrunc:  return Expression::FPTRUNC;
  case Instruction::FPExt:    return Expression::FPEXT;
  case Instruction::PtrToInt: return Expression::PTRTOINT;
  case Instruction::IntToPtr: return Expression::INTTOPTR;
  case Instruction::BitCast:  return Expression::BITCAST;
  }
}

Expression ValueTable::create_expression(CallInst* C) {
  Expression e;

  e.type = C->getType();
  e.function = C->getCalledFunction();
  e.opcode = Expression::CALL;

  for (CallInst::op_iterator I = C->op_begin()+1, E = C->op_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(BinaryOperator* BO) {
  Expression e;
  e.varargs.push_back(lookup_or_add(BO->getOperand(0)));
  e.varargs.push_back(lookup_or_add(BO->getOperand(1)));
  e.function = 0;
  e.type = BO->getType();
  e.opcode = getOpcode(BO);

  return e;
}

Expression ValueTable::create_expression(CmpInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.varargs.push_back(lookup_or_add(C->getOperand(1)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = getOpcode(C);

  return e;
}

Expression ValueTable::create_expression(CastInst* C) {
  Expression e;

  e.varargs.push_back(lookup_or_add(C->getOperand(0)));
  e.function = 0;
  e.type = C->getType();
  e.opcode = getOpcode(C);

  return e;
}

Expression ValueTable::create_expression(ShuffleVectorInst* S) {
  Expression e;

  e.varargs.push_back(lookup_or_add(S->getOperand(0)));
  e.varargs.push_back(lookup_or_add(S->getOperand(1)));
  e.varargs.push_back(lookup_or_add(S->getOperand(2)));
  e.function = 0;
  e.type = S->getType();
  e.opcode = Expression::SHUFFLE;

  return e;
}

Expression ValueTable::create_expression(ExtractElementInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getOperand(0)));
  e.varargs.push_back(lookup_or_add(E->getOperand(1)));
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACT;

  return e;
}

Expression ValueTable::create_expression(InsertElementInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getOperand(0)));
  e.varargs.push_back(lookup_or_add(I->getOperand(1)));
  e.varargs.push_back(lookup_or_add(I->getOperand(2)));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::INSERT;

  return e;
}

Expression ValueTable::create_expression(SelectInst* I) {
  Expression e;

  e.varargs.push_back(lookup_or_add(I->getCondition()));
  e.varargs.push_back(lookup_or_add(I->getTrueValue()));
  e.varargs.push_back(lookup_or_add(I->getFalseValue()));
  e.function = 0;
  e.type = I->getType();
  e.opcode = Expression::SELECT;

  return e;
}

Expression ValueTable::create_expression(GetElementPtrInst* G) {
  Expression e;

  e.varargs.push_back(lookup_or_add(G->getPointerOperand()));
  e.function = 0;
  e.type = G->getType();
  e.opcode = Expression::GEP;

  for (GetElementPtrInst::op_iterator I = G->idx_begin(), E = G->idx_end();
       I != E; ++I)
    e.varargs.push_back(lookup_or_add(*I));

  return e;
}

Expression ValueTable::create_expression(ExtractValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  for (ExtractValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::EXTRACTVALUE;

  return e;
}

Expression ValueTable::create_expression(InsertValueInst* E) {
  Expression e;

  e.varargs.push_back(lookup_or_add(E->getAggregateOperand()));
  e.varargs.push_back(lookup_or_add(E->getInsertedValueOperand()));
  for (InsertValueInst::idx_iterator II = E->idx_begin(), IE = E->idx_end();
       II != IE; ++II)
    e.varargs.push_back(*II);
  e.function = 0;
  e.type = E->getType();
  e.opcode = Expression::INSERTVALUE;

  return e;
}

//===----------------------------------------------------------------------===//
//                     ValueTable External Functions
//===----------------------------------------------------------------------===//

/// add - Insert a value into the table with a specified value number.
void ValueTable::add(Value *V, uint32_t num) {
  valueNumbering.insert(std::make_pair(V, num));
}

uint32_t ValueTable::lookup_or_add_call(CallInst* C) {
  if (AA->doesNotAccessMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) e = nextValueNumber++;
    valueNumbering[C] = e;
    return e;
  } else if (AA->onlyReadsMemory(C)) {
    Expression exp = create_expression(C);
    uint32_t& e = expressionNumbering[exp];
    if (!e) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }
    if (!MD) {
      e = nextValueNumber++;
      valueNumbering[C] = e;
      return e;
    }

    MemDepResult local_dep = MD->getDependency(C);

    if (!local_dep.isDef() && !local_dep.isNonLocal()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (local_dep.isDef()) {
      CallInst* local_cdep = cast<CallInst>(local_dep.getInst());

      if (local_cdep->getNumOperands() != C->getNumOperands()) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }

      for (unsigned i = 1; i < C->getNumOperands(); ++i) {
        uint32_t c_vn = lookup_or_add(C->getOperand(i));
        uint32_t cd_vn = lookup_or_add(local_cdep->getOperand(i));
        if (c_vn != cd_vn) {
          valueNumbering[C] = nextValueNumber;
          return nextValueNumber++;
        }
      }

      uint32_t v = lookup_or_add(local_cdep);
      valueNumbering[C] = v;
      return v;
    }

    // Non-local case.
    const MemoryDependenceAnalysis::NonLocalDepInfo &deps =
      MD->getNonLocalCallDependency(CallSite(C));
    // FIXME: call/call dependencies for readonly calls should return def, not
    // clobber! Move the checking logic to MemDep!
    CallInst* cdep = 0;

    // Check to see if we have a single dominating call instruction that is
    // identical to C.
    for (unsigned i = 0, e = deps.size(); i != e; ++i) {
      const MemoryDependenceAnalysis::NonLocalDepEntry *I = &deps[i];
      // Ignore non-local dependencies.
      if (I->second.isNonLocal())
        continue;

      // We don't handle non-dependencies. If we already have a call, reject
      // instruction dependencies.
      if (I->second.isClobber() || cdep != 0) {
        cdep = 0;
        break;
      }

      CallInst *NonLocalDepCall = dyn_cast<CallInst>(I->second.getInst());
      // FIXME: All duplicated with non-local case.
      if (NonLocalDepCall && DT->properlyDominates(I->first, C->getParent())){
        cdep = NonLocalDepCall;
        continue;
      }

      cdep = 0;
      break;
    }

    if (!cdep) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }

    if (cdep->getNumOperands() != C->getNumOperands()) {
      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;
    }
    for (unsigned i = 1; i < C->getNumOperands(); ++i) {
      uint32_t c_vn = lookup_or_add(C->getOperand(i));
      uint32_t cd_vn = lookup_or_add(cdep->getOperand(i));
      if (c_vn != cd_vn) {
        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;
      }
    }

    uint32_t v = lookup_or_add(cdep);
    valueNumbering[C] = v;
    return v;

  } else {
    valueNumbering[C] = nextValueNumber;
    return nextValueNumber++;
  }
}
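
// Example of the readonly-call case above: given
//   %r1 = call i32 @f(i32 %x)   ; @f only reads memory
//   %r2 = call i32 @f(i32 %x)   ; no clobbering write in between
// memdep reports %r1 as the defining dependency of %r2, every argument pair
// gets the same value number (the loops start at operand 1 because operand 0
// is the callee here), and %r2 is assigned the value number of %r1.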

/// lookup_or_add - Returns the value number for the specified value, assigning
/// it a new number if it did not have one before.
uint32_t ValueTable::lookup_or_add(Value *V) {
  DenseMap<Value*, uint32_t>::iterator VI = valueNumbering.find(V);
  if (VI != valueNumbering.end())
    return VI->second;

  if (!isa<Instruction>(V)) {
    valueNumbering[V] = nextValueNumber;
    return nextValueNumber++;
  }

  Instruction* I = cast<Instruction>(V);
  Expression exp;
  switch (I->getOpcode()) {
    case Instruction::Call:
      return lookup_or_add_call(cast<CallInst>(I));
    case Instruction::Add:
    case Instruction::FAdd:
    case Instruction::Sub:
    case Instruction::FSub:
    case Instruction::Mul:
    case Instruction::FMul:
    case Instruction::UDiv:
    case Instruction::SDiv:
    case Instruction::FDiv:
    case Instruction::URem:
    case Instruction::SRem:
    case Instruction::FRem:
    case Instruction::Shl:
    case Instruction::LShr:
    case Instruction::AShr:
    case Instruction::And:
    case Instruction::Or :
    case Instruction::Xor:
      exp = create_expression(cast<BinaryOperator>(I));
      break;
    case Instruction::ICmp:
    case Instruction::FCmp:
      exp = create_expression(cast<CmpInst>(I));
      break;
    case Instruction::Trunc:
    case Instruction::ZExt:
    case Instruction::SExt:
    case Instruction::FPToUI:
    case Instruction::FPToSI:
    case Instruction::UIToFP:
    case Instruction::SIToFP:
    case Instruction::FPTrunc:
    case Instruction::FPExt:
    case Instruction::PtrToInt:
    case Instruction::IntToPtr:
    case Instruction::BitCast:
      exp = create_expression(cast<CastInst>(I));
      break;
    case Instruction::Select:
      exp = create_expression(cast<SelectInst>(I));
      break;
    case Instruction::ExtractElement:
      exp = create_expression(cast<ExtractElementInst>(I));
      break;
    case Instruction::InsertElement:
      exp = create_expression(cast<InsertElementInst>(I));
      break;
    case Instruction::ShuffleVector:
      exp = create_expression(cast<ShuffleVectorInst>(I));
      break;
    case Instruction::ExtractValue:
      exp = create_expression(cast<ExtractValueInst>(I));
      break;
    case Instruction::InsertValue:
      exp = create_expression(cast<InsertValueInst>(I));
      break;
    case Instruction::GetElementPtr:
      exp = create_expression(cast<GetElementPtrInst>(I));
      break;
    default:
      valueNumbering[V] = nextValueNumber;
      return nextValueNumber++;
  }

  uint32_t& e = expressionNumbering[exp];
  if (!e) e = nextValueNumber++;
  valueNumbering[V] = e;
  return e;
}

/// lookup - Returns the value number of the specified value. Fails if
/// the value has not yet been numbered.
uint32_t ValueTable::lookup(Value *V) const {
  DenseMap<Value*, uint32_t>::const_iterator VI = valueNumbering.find(V);
  assert(VI != valueNumbering.end() && "Value not numbered?");
  return VI->second;
}

/// clear - Remove all entries from the ValueTable
void ValueTable::clear() {
  valueNumbering.clear();
  expressionNumbering.clear();
  nextValueNumber = 1;
}

/// erase - Remove a value from the value numbering
void ValueTable::erase(Value *V) {
  valueNumbering.erase(V);
}

/// verifyRemoved - Verify that the value is removed from all internal data
/// structures.
void ValueTable::verifyRemoved(const Value *V) const {
  for (DenseMap<Value*, uint32_t>::const_iterator
         I = valueNumbering.begin(), E = valueNumbering.end(); I != E; ++I) {
    assert(I->first != V && "Inst still occurs in value numbering map!");
  }
}

//===----------------------------------------------------------------------===//
//                                GVN Pass
//===----------------------------------------------------------------------===//

namespace {
  struct ValueNumberScope {
    ValueNumberScope* parent;
    DenseMap<uint32_t, Value*> table;

    ValueNumberScope(ValueNumberScope* p) : parent(p) { }
  };
}

namespace {

  class GVN : public FunctionPass {
    bool runOnFunction(Function &F);
  public:
    static char ID; // Pass identification, replacement for typeid
    explicit GVN(bool nopre = false, bool noloads = false)
      : FunctionPass(&ID), NoPRE(nopre), NoLoads(noloads), MD(0) { }

  private:
    bool NoPRE;
    bool NoLoads;
    MemoryDependenceAnalysis *MD;
    DominatorTree *DT;

    ValueTable VN;
    DenseMap<BasicBlock*, ValueNumberScope*> localAvail;

    // This transformation requires dominator info.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<DominatorTree>();
      if (!NoLoads)
        AU.addRequired<MemoryDependenceAnalysis>();
      AU.addRequired<AliasAnalysis>();

      AU.addPreserved<DominatorTree>();
      AU.addPreserved<AliasAnalysis>();
    }

    // Helper functions
    // FIXME: eliminate or document these better
    bool processLoad(LoadInst* L,
                     SmallVectorImpl<Instruction*> &toErase);
    bool processInstruction(Instruction *I,
                            SmallVectorImpl<Instruction*> &toErase);
    bool processNonLocalLoad(LoadInst* L,
                             SmallVectorImpl<Instruction*> &toErase);
    bool processBlock(BasicBlock *BB);
    void dump(DenseMap<uint32_t, Value*>& d);
    bool iterateOnFunction(Function &F);
    Value *CollapsePhi(PHINode* p);
    bool performPRE(Function& F);
    Value *lookupNumber(BasicBlock *BB, uint32_t num);
    void cleanupGlobalSets();
    void verifyRemoved(const Instruction *I) const;
  };

  char GVN::ID = 0;
}

// createGVNPass - The public interface to this file...
FunctionPass *llvm::createGVNPass(bool NoPRE, bool NoLoads) {
  return new GVN(NoPRE, NoLoads);
}

static RegisterPass<GVN> X("gvn",
                           "Global Value Numbering");

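// A sketch of how this pass is typically scheduled (illustrative only; the
// pass-manager lines below are the usual boilerplate, not part of this file):
//
//   PassManager PM;
//   PM.add(createGVNPass());            // defaults: PRE and load handling on
//   PM.add(createGVNPass(false, true)); // NoLoads=true: skip memdep-based
//                                       // load elimination
//   PM.run(M);
//
// or from the command line as "opt -gvn", using the "gvn" name registered
// above.
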
void GVN::dump(DenseMap<uint32_t, Value*>& d) {
  printf("{\n");
  for (DenseMap<uint32_t, Value*>::iterator I = d.begin(),
       E = d.end(); I != E; ++I) {
    printf("%d\n", I->first);
    I->second->dump();
  }
  printf("}\n");
}

static bool isSafeReplacement(PHINode* p, Instruction *inst) {
  if (!isa<PHINode>(inst))
    return true;

  for (Instruction::use_iterator UI = p->use_begin(), E = p->use_end();
       UI != E; ++UI)
    if (PHINode* use_phi = dyn_cast<PHINode>(UI))
      if (use_phi->getParent() == inst->getParent())
        return false;

  return true;
}

Value *GVN::CollapsePhi(PHINode *PN) {
  Value *ConstVal = PN->hasConstantValue(DT);
  if (!ConstVal) return 0;

  Instruction *Inst = dyn_cast<Instruction>(ConstVal);
  if (!Inst)
    return ConstVal;

  if (DT->dominates(Inst, PN))
    if (isSafeReplacement(PN, Inst))
      return Inst;
  return 0;
}
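
// Example of the collapse above: a phi whose incoming values are all the same
// definition, such as
//   %p = phi i32 [ %v, %bb1 ], [ %v, %bb2 ]
// has "constant value" %v. If %v is an instruction that dominates the phi
// (and no phi user in %v's block makes the replacement unsafe), CollapsePhi
// returns %v so the caller can substitute it for %p.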

/// IsValueFullyAvailableInBlock - Return true if we can prove that the value
/// we're analyzing is fully available in the specified block. As we go, keep
/// track of which blocks we know are fully alive in FullyAvailableBlocks. This
/// map is actually a tri-state map with the following values:
///   0) we know the block *is not* fully available.
///   1) we know the block *is* fully available.
///   2) we do not know whether the block is fully available or not, but we are
///      currently speculating that it will be.
///   3) we are speculating for this block and have used that to speculate for
///      other blocks.
static bool IsValueFullyAvailableInBlock(BasicBlock *BB,
                            DenseMap<BasicBlock*, char> &FullyAvailableBlocks) {
  // Optimistically assume that the block is fully available and check to see
  // if we already know about this block in one lookup.
  std::pair<DenseMap<BasicBlock*, char>::iterator, char> IV =
    FullyAvailableBlocks.insert(std::make_pair(BB, 2));

  // If the entry already existed for this block, return the precomputed value.
  if (!IV.second) {
    // If this is a speculative "available" value, mark it as being used for
    // speculation of other blocks.
    if (IV.first->second == 2)
      IV.first->second = 3;
    return IV.first->second != 0;
  }

  // Otherwise, see if it is fully available in all predecessors.
  pred_iterator PI = pred_begin(BB), PE = pred_end(BB);

  // If this block has no predecessors, it isn't live-in here.
  if (PI == PE)
    goto SpeculationFailure;

  for (; PI != PE; ++PI)
    // If the value isn't fully available in one of our predecessors, then it
    // isn't fully available in this block either. Undo our previous
    // optimistic assumption and bail out.
    if (!IsValueFullyAvailableInBlock(*PI, FullyAvailableBlocks))
      goto SpeculationFailure;

  return true;

// SpeculationFailure - If we get here, we found out that this is not, after
// all, a fully-available block. We have a problem if we speculated on this and
// used the speculation to mark other blocks as available.
SpeculationFailure:
  char &BBVal = FullyAvailableBlocks[BB];

  // If we didn't speculate on this, just return with it set to false.
  if (BBVal == 2) {
    BBVal = 0;
    return false;
  }

  // If we did speculate on this value, we could have blocks set to 1 that are
  // incorrect. Walk the (transitive) successors of this block and mark them as
  // 0 if set to one.
  SmallVector<BasicBlock*, 32> BBWorklist;
  BBWorklist.push_back(BB);

  while (!BBWorklist.empty()) {
    BasicBlock *Entry = BBWorklist.pop_back_val();
    // Note that this sets blocks to 0 (unavailable) if they happen to not
    // already be in FullyAvailableBlocks. This is safe.
    char &EntryVal = FullyAvailableBlocks[Entry];
    if (EntryVal == 0) continue;  // Already unavailable.

    // Mark as unavailable.
    EntryVal = 0;

    for (succ_iterator I = succ_begin(Entry), E = succ_end(Entry); I != E; ++I)
      BBWorklist.push_back(*I);
  }

  return false;
}
Chris Lattnera0aa8fb2009-09-20 20:09:34 +0000846
Chris Lattner9045f232009-09-21 17:24:04 +0000847/// CanCoerceMustAliasedValueToLoad - Return true if
848/// CoerceAvailableValueToLoadType will succeed.
849static bool CanCoerceMustAliasedValueToLoad(Value *StoredVal,
850 const Type *LoadTy,
851 const TargetData &TD) {
852 // If the loaded or stored value is an first class array or struct, don't try
853 // to transform them. We need to be able to bitcast to integer.
854 if (isa<StructType>(LoadTy) || isa<ArrayType>(LoadTy) ||
855 isa<StructType>(StoredVal->getType()) ||
856 isa<ArrayType>(StoredVal->getType()))
857 return false;
858
859 // The store has to be at least as big as the load.
860 if (TD.getTypeSizeInBits(StoredVal->getType()) <
861 TD.getTypeSizeInBits(LoadTy))
862 return false;
863
864 return true;
865}
866
867
Chris Lattnera0aa8fb2009-09-20 20:09:34 +0000868/// CoerceAvailableValueToLoadType - If we saw a store of a value to memory, and
869/// then a load from a must-aliased pointer of a different type, try to coerce
870/// the stored value. LoadedTy is the type of the load we want to replace and
871/// InsertPt is the place to insert new instructions.
872///
873/// If we can't do it, return null.
874static Value *CoerceAvailableValueToLoadType(Value *StoredVal,
875 const Type *LoadedTy,
876 Instruction *InsertPt,
877 const TargetData &TD) {
Chris Lattner9045f232009-09-21 17:24:04 +0000878 if (!CanCoerceMustAliasedValueToLoad(StoredVal, LoadedTy, TD))
879 return 0;
880
Chris Lattnera0aa8fb2009-09-20 20:09:34 +0000881 const Type *StoredValTy = StoredVal->getType();
882
883 uint64_t StoreSize = TD.getTypeSizeInBits(StoredValTy);
884 uint64_t LoadSize = TD.getTypeSizeInBits(LoadedTy);
885
886 // If the store and reload are the same size, we can always reuse it.
887 if (StoreSize == LoadSize) {
888 if (isa<PointerType>(StoredValTy) && isa<PointerType>(LoadedTy)) {
889 // Pointer to Pointer -> use bitcast.
890 return new BitCastInst(StoredVal, LoadedTy, "", InsertPt);
891 }
892
893 // Convert source pointers to integers, which can be bitcast.
894 if (isa<PointerType>(StoredValTy)) {
895 StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
896 StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
897 }
898
899 const Type *TypeToCastTo = LoadedTy;
900 if (isa<PointerType>(TypeToCastTo))
901 TypeToCastTo = TD.getIntPtrType(StoredValTy->getContext());
902
903 if (StoredValTy != TypeToCastTo)
904 StoredVal = new BitCastInst(StoredVal, TypeToCastTo, "", InsertPt);
905
906 // Cast to pointer if the load needs a pointer type.
907 if (isa<PointerType>(LoadedTy))
908 StoredVal = new IntToPtrInst(StoredVal, LoadedTy, "", InsertPt);
909
910 return StoredVal;
911 }
912
913 // If the loaded value is smaller than the available value, then we can
914 // extract out a piece from it. If the available value is too small, then we
915 // can't do anything.
Chris Lattner9045f232009-09-21 17:24:04 +0000916 assert(StoreSize >= LoadSize && "CanCoerceMustAliasedValueToLoad fail");
Chris Lattnera0aa8fb2009-09-20 20:09:34 +0000917
918 // Convert source pointers to integers, which can be manipulated.
919 if (isa<PointerType>(StoredValTy)) {
920 StoredValTy = TD.getIntPtrType(StoredValTy->getContext());
921 StoredVal = new PtrToIntInst(StoredVal, StoredValTy, "", InsertPt);
922 }
923
924 // Convert vectors and fp to integer, which can be manipulated.
925 if (!isa<IntegerType>(StoredValTy)) {
926 StoredValTy = IntegerType::get(StoredValTy->getContext(), StoreSize);
927 StoredVal = new BitCastInst(StoredVal, StoredValTy, "", InsertPt);
928 }
929
930 // If this is a big-endian system, we need to shift the value down to the low
931 // bits so that a truncate will work.
932 if (TD.isBigEndian()) {
933 Constant *Val = ConstantInt::get(StoredVal->getType(), StoreSize-LoadSize);
934 StoredVal = BinaryOperator::CreateLShr(StoredVal, Val, "tmp", InsertPt);
935 }
936
937 // Truncate the integer to the right size now.
938 const Type *NewIntTy = IntegerType::get(StoredValTy->getContext(), LoadSize);
939 StoredVal = new TruncInst(StoredVal, NewIntTy, "trunc", InsertPt);
940
941 if (LoadedTy == NewIntTy)
942 return StoredVal;
943
944 // If the result is a pointer, inttoptr.
945 if (isa<PointerType>(LoadedTy))
946 return new IntToPtrInst(StoredVal, LoadedTy, "inttoptr", InsertPt);
947
948 // Otherwise, bitcast.
949 return new BitCastInst(StoredVal, LoadedTy, "bitcast", InsertPt);
950}
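
// Examples of the coercion above, assuming a 64-bit TargetData: an i8* store
// feeding an i64 load becomes a ptrtoint; an equal-sized pointer-to-pointer
// reuse is a plain bitcast; and an i64 store feeding an i32 load is truncated
// directly on a little-endian target, or shifted right by 32 bits first on a
// big-endian target.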

/// GetBaseWithConstantOffset - Analyze the specified pointer to see if it can
/// be expressed as a base pointer plus a constant offset. Return the base and
/// offset to the caller.
static Value *GetBaseWithConstantOffset(Value *Ptr, int64_t &Offset,
                                        const TargetData &TD) {
  Operator *PtrOp = dyn_cast<Operator>(Ptr);
  if (PtrOp == 0) return Ptr;

  // Just look through bitcasts.
  if (PtrOp->getOpcode() == Instruction::BitCast)
    return GetBaseWithConstantOffset(PtrOp->getOperand(0), Offset, TD);

  // If this is a GEP with constant indices, we can look through it.
  GEPOperator *GEP = dyn_cast<GEPOperator>(PtrOp);
  if (GEP == 0 || !GEP->hasAllConstantIndices()) return Ptr;

  gep_type_iterator GTI = gep_type_begin(GEP);
  for (User::op_iterator I = GEP->idx_begin(), E = GEP->idx_end(); I != E;
       ++I, ++GTI) {
    ConstantInt *OpC = cast<ConstantInt>(*I);
    if (OpC->isZero()) continue;

    // Handle struct and array indices, which add their offset to the pointer.
    if (const StructType *STy = dyn_cast<StructType>(*GTI)) {
      Offset += TD.getStructLayout(STy)->getElementOffset(OpC->getZExtValue());
    } else {
      uint64_t Size = TD.getTypeAllocSize(GTI.getIndexedType());
      Offset += OpC->getSExtValue()*Size;
    }
  }

  // Re-sign extend from the pointer size if needed to get overflow edge cases
  // right.
  unsigned PtrSize = TD.getPointerSizeInBits();
  if (PtrSize < 64)
    Offset = (Offset << (64-PtrSize)) >> (64-PtrSize);

  return GetBaseWithConstantOffset(GEP->getPointerOperand(), Offset, TD);
}
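
// Example decomposition performed above: for
//   %p = getelementptr { i32, [4 x i16] }* %base, i32 0, i32 1, i32 2
// with a typical TargetData the field index contributes an offset of 4 bytes
// and the array index contributes 2 * 2 bytes, so the function returns %base
// with Offset = 8. A GEP with any non-constant index is not looked through.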


/// AnalyzeLoadFromClobberingWrite - This function is called when we have a
/// memdep query of a load that ends up being a clobbering memory write (store,
/// memset, memcpy, memmove). This means that the write *may* provide bits used
/// by the load but we can't be sure because the pointers don't mustalias.
///
/// Check this case to see if there is anything more we can do before we give
/// up. This returns -1 if we have to give up, or a byte number in the stored
/// value of the piece that feeds the load.
static int AnalyzeLoadFromClobberingWrite(LoadInst *L, Value *WritePtr,
                                          uint64_t WriteSizeInBits,
                                          const TargetData &TD) {
  // If the loaded or stored value is a first-class array or struct, don't try
  // to transform them. We need to be able to bitcast to integer.
  if (isa<StructType>(L->getType()) || isa<ArrayType>(L->getType()))
    return -1;

  int64_t StoreOffset = 0, LoadOffset = 0;
  Value *StoreBase = GetBaseWithConstantOffset(WritePtr, StoreOffset, TD);
  Value *LoadBase =
    GetBaseWithConstantOffset(L->getPointerOperand(), LoadOffset, TD);
  if (StoreBase != LoadBase)
    return -1;

  // If the load and store are to the exact same address, they should have been
  // a must alias. AA must have gotten confused.
  // FIXME: Study to see if/when this happens.
  if (LoadOffset == StoreOffset) {
#if 0
    errs() << "STORE/LOAD DEP WITH COMMON POINTER MISSED:\n"
           << "Base       = " << *StoreBase << "\n"
           << "Store Ptr  = " << *WritePtr << "\n"
           << "Store Offs = " << StoreOffset << "\n"
           << "Load Ptr   = " << *L->getPointerOperand() << "\n"
           << "Load Offs  = " << LoadOffset << " - " << *L << "\n\n";
    errs() << "'" << L->getParent()->getParent()->getName() << "'"
           << *L->getParent();
    abort();
#endif
    return -1;
  }

  // If the load and store don't overlap at all, the store doesn't provide
  // anything to the load. In this case, they really don't alias at all, AA
  // must have gotten confused.
  // FIXME: Investigate cases where this bails out, e.g. rdar://7238614. Then
  // remove this check, as it is duplicated with what we have below.
  uint64_t LoadSize = TD.getTypeSizeInBits(L->getType());

  if ((WriteSizeInBits & 7) | (LoadSize & 7))
    return -1;
  uint64_t StoreSize = WriteSizeInBits >> 3;  // Convert to bytes.
  LoadSize >>= 3;


  bool isAAFailure = false;
  if (StoreOffset < LoadOffset) {
    isAAFailure = StoreOffset+int64_t(StoreSize) <= LoadOffset;
  } else {
    isAAFailure = LoadOffset+int64_t(LoadSize) <= StoreOffset;
  }
  if (isAAFailure) {
#if 0
    errs() << "STORE LOAD DEP WITH COMMON BASE:\n"
           << "Base       = " << *StoreBase << "\n"
           << "Store Ptr  = " << *WritePtr << "\n"
           << "Store Offs = " << StoreOffset << "\n"
           << "Load Ptr   = " << *L->getPointerOperand() << "\n"
           << "Load Offs  = " << LoadOffset << " - " << *L << "\n\n";
    errs() << "'" << L->getParent()->getParent()->getName() << "'"
           << *L->getParent();
    abort();
#endif
    return -1;
  }

  // If the Load isn't completely contained within the stored bits, we don't
  // have all the bits to feed it. We could do something crazy in the future
  // (issue a smaller load then merge the bits in) but this seems unlikely to be
  // valuable.
  if (StoreOffset > LoadOffset ||
      StoreOffset+StoreSize < LoadOffset+LoadSize)
    return -1;

  // Okay, we can do this transformation. Return the number of bytes into the
  // store that the load is.
  return LoadOffset-StoreOffset;
}
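
// Numeric example for the containment check above: a 16-byte store to %base+0
// that clobbers a 4-byte load from %base+12 shares the base, and the stored
// bytes [0,16) fully cover the loaded bytes [12,16), so the function returns
// 12, the byte offset of the loaded piece within the stored value. If the
// load instead needed bytes [12,20), it would not be fully covered and the
// result would be -1.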

/// AnalyzeLoadFromClobberingStore - This function is called when we have a
/// memdep query of a load that ends up being a clobbering store.
static int AnalyzeLoadFromClobberingStore(LoadInst *L, StoreInst *DepSI,
                                          const TargetData &TD) {
  // Cannot handle reading from store of first-class aggregate yet.
  if (isa<StructType>(DepSI->getOperand(0)->getType()) ||
      isa<ArrayType>(DepSI->getOperand(0)->getType()))
    return -1;

  Value *StorePtr = DepSI->getPointerOperand();
  uint64_t StoreSize = TD.getTypeSizeInBits(StorePtr->getType());
  return AnalyzeLoadFromClobberingWrite(L, StorePtr, StoreSize, TD);
}

static int AnalyzeLoadFromClobberingMemInst(LoadInst *L, MemIntrinsic *MI,
                                            const TargetData &TD) {
  // If the mem operation is a non-constant size, we can't handle it.
  ConstantInt *SizeCst = dyn_cast<ConstantInt>(MI->getLength());
  if (SizeCst == 0) return -1;
  uint64_t MemSizeInBits = SizeCst->getZExtValue()*8;

  // If this is memset, we just need to see if the offset is valid in the size
  // of the memset.
  if (MI->getIntrinsicID() == Intrinsic::memset)
    return AnalyzeLoadFromClobberingWrite(L, MI->getDest(), MemSizeInBits, TD);

  // If we have a memcpy/memmove, the only case we can handle is if this is a
  // copy from constant memory. In that case, we can read directly from the
  // constant memory.
  MemTransferInst *MTI = cast<MemTransferInst>(MI);

  Constant *Src = dyn_cast<Constant>(MTI->getSource());
  if (Src == 0) return -1;

  GlobalVariable *GV = dyn_cast<GlobalVariable>(Src->getUnderlyingObject());
  if (GV == 0 || !GV->isConstant()) return -1;

  // See if the access is within the bounds of the transfer.
  int Offset =
    AnalyzeLoadFromClobberingWrite(L, MI->getDest(), MemSizeInBits, TD);
  if (Offset == -1)
    return Offset;

  // Otherwise, see if we can constant fold a load from the constant with the
  // offset applied as appropriate.
  Src = ConstantExpr::getBitCast(Src,
                                 llvm::Type::getInt8PtrTy(Src->getContext()));
  Constant *OffsetCst =
    ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
  Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
  Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(L->getType()));
  if (ConstantFoldLoadFromConstPtr(Src, &TD))
    return Offset;
  return -1;
}
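
// Examples for the intrinsic cases above: a 4-byte load clobbered by
// memset(p, c, 16) is answered with its byte offset into the 16-byte region
// (the splatted value covers any offset), while a load clobbered by
// memcpy(p, src, 16) is only accepted when src is a constant GlobalVariable
// and a constant-folded load from it at that offset succeeds; a non-constant
// length always yields -1.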
1136
Chris Lattnerd28f9082009-09-21 06:24:16 +00001137
1138/// GetStoreValueForLoad - This function is called when we have a
1139/// memdep query of a load that ends up being a clobbering store. By this
1140/// point we have already established (via AnalyzeLoadFromClobberingStore)
1141/// that the store provides all of the bits the load needs. Extract the bytes
1142/// of SrcVal starting at byte Offset and coerce them to LoadTy.
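/// For example (illustrative, little-endian target assumed): extracting byte 1
/// of a stored i32 %x as an i8 emits roughly
///   %tmp = lshr i32 %x, 8
///   %val = trunc i32 %tmp to i8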
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001143static Value *GetStoreValueForLoad(Value *SrcVal, unsigned Offset,
1144 const Type *LoadTy,
1145 Instruction *InsertPt, const TargetData &TD){
Chris Lattnerd28f9082009-09-21 06:24:16 +00001146 LLVMContext &Ctx = SrcVal->getType()->getContext();
1147
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001148 uint64_t StoreSize = TD.getTypeSizeInBits(SrcVal->getType())/8;
1149 uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy)/8;
Chris Lattnerd28f9082009-09-21 06:24:16 +00001150
1151
1152 // Compute which bits of the stored value are being used by the load. Convert
1153 // to an integer type to start with.
1154 if (isa<PointerType>(SrcVal->getType()))
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001155 SrcVal = new PtrToIntInst(SrcVal, TD.getIntPtrType(Ctx), "tmp", InsertPt);
Chris Lattnerd28f9082009-09-21 06:24:16 +00001156 if (!isa<IntegerType>(SrcVal->getType()))
1157 SrcVal = new BitCastInst(SrcVal, IntegerType::get(Ctx, StoreSize*8),
1158 "tmp", InsertPt);
1159
1160 // Shift the bits we want down to the least significant end, depending on endianness.
1161 unsigned ShiftAmt;
Chris Lattner42376062009-12-06 01:57:02 +00001162 if (TD.isLittleEndian())
Chris Lattnerd28f9082009-09-21 06:24:16 +00001163 ShiftAmt = Offset*8;
Chris Lattner42376062009-12-06 01:57:02 +00001164 else
Chris Lattner24705382009-09-21 17:55:47 +00001165 ShiftAmt = (StoreSize-LoadSize-Offset)*8;
Chris Lattnerd28f9082009-09-21 06:24:16 +00001166
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001167 if (ShiftAmt)
1168 SrcVal = BinaryOperator::CreateLShr(SrcVal,
1169 ConstantInt::get(SrcVal->getType(), ShiftAmt), "tmp", InsertPt);
Chris Lattnerd28f9082009-09-21 06:24:16 +00001170
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001171 if (LoadSize != StoreSize)
1172 SrcVal = new TruncInst(SrcVal, IntegerType::get(Ctx, LoadSize*8),
1173 "tmp", InsertPt);
Chris Lattnerd28f9082009-09-21 06:24:16 +00001174
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001175 return CoerceAvailableValueToLoadType(SrcVal, LoadTy, InsertPt, TD);
Chris Lattnerd28f9082009-09-21 06:24:16 +00001176}
1177
Chris Lattner42376062009-12-06 01:57:02 +00001178/// GetMemInstValueForLoad - This function is called when we have a
1179/// memdep query of a load that ends up being a clobbering mem intrinsic.
1180static Value *GetMemInstValueForLoad(MemIntrinsic *SrcInst, unsigned Offset,
1181 const Type *LoadTy, Instruction *InsertPt,
1182 const TargetData &TD){
1183 LLVMContext &Ctx = LoadTy->getContext();
1184 uint64_t LoadSize = TD.getTypeSizeInBits(LoadTy)/8;
1185
1186 IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
1187
1188 // We know that this method is only called when the mem intrinsic fully
1189 // provides the bits for the load.
1190 if (MemSetInst *MSI = dyn_cast<MemSetInst>(SrcInst)) {
1191 // memset(P, 'x', 1234) -> splat('x'), even if x is a variable, and
1192 // independently of what the offset is.
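// For example (illustrative): forwarding an i32 load from a memset of byte %b,
// the code below builds roughly
//   %z = zext i8 %b to i32
//   %s = shl i32 %z, 8
//   %t = or  i32 %z, %s        ; low two bytes set
//   %u = shl i32 %t, 16
//   %v = or  i32 %t, %u        ; all four bytes set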
1193 Value *Val = MSI->getValue();
1194 if (LoadSize != 1)
1195 Val = Builder.CreateZExt(Val, IntegerType::get(Ctx, LoadSize*8));
1196
1197 Value *OneElt = Val;
1198
1199 // Splat the value out to the right number of bits.
1200 for (unsigned NumBytesSet = 1; NumBytesSet != LoadSize; ) {
1201 // If we can double the number of bytes set, do it.
1202 if (NumBytesSet*2 <= LoadSize) {
1203 Value *ShVal = Builder.CreateShl(Val, NumBytesSet*8);
1204 Val = Builder.CreateOr(Val, ShVal);
1205 NumBytesSet <<= 1;
1206 continue;
1207 }
1208
1209 // Otherwise insert one byte at a time.
1210 Value *ShVal = Builder.CreateShl(Val, 1*8);
1211 Val = Builder.CreateOr(OneElt, ShVal);
1212 ++NumBytesSet;
1213 }
1214
1215 return CoerceAvailableValueToLoadType(Val, LoadTy, InsertPt, TD);
1216 }
Chris Lattner778cb922009-12-06 05:29:56 +00001217
1218 // Otherwise, this is a memcpy/memmove from a constant global.
1219 MemTransferInst *MTI = cast<MemTransferInst>(SrcInst);
1220 Constant *Src = cast<Constant>(MTI->getSource());
1221
1222 // Now see if we can constant fold a load from the constant with the
1223 // offset applied as appropriate.
1224 Src = ConstantExpr::getBitCast(Src,
1225 llvm::Type::getInt8PtrTy(Src->getContext()));
1226 Constant *OffsetCst =
1227 ConstantInt::get(Type::getInt64Ty(Src->getContext()), (unsigned)Offset);
1228 Src = ConstantExpr::getGetElementPtr(Src, &OffsetCst, 1);
1229 Src = ConstantExpr::getBitCast(Src, PointerType::getUnqual(LoadTy));
1230 return ConstantFoldLoadFromConstPtr(Src, &TD);
Chris Lattner42376062009-12-06 01:57:02 +00001231}
1232
1233
1234
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001235struct AvailableValueInBlock {
1236 /// BB - The basic block in question.
1237 BasicBlock *BB;
Chris Lattner93236ba2009-12-06 04:54:31 +00001238 enum ValType {
1239 SimpleVal, // A simple offsetted value that is accessed.
1240 MemIntrin // A memory intrinsic which is loaded from.
1241 };
1242
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001243 /// Val - The value that is live out of the block.
Chris Lattner93236ba2009-12-06 04:54:31 +00001244 PointerIntPair<Value *, 1, ValType> Val;
1245
1246 /// Offset - The byte offset in Val that is interesting for the load query.
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001247 unsigned Offset;
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001248
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001249 static AvailableValueInBlock get(BasicBlock *BB, Value *V,
1250 unsigned Offset = 0) {
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001251 AvailableValueInBlock Res;
1252 Res.BB = BB;
Chris Lattner93236ba2009-12-06 04:54:31 +00001253 Res.Val.setPointer(V);
1254 Res.Val.setInt(SimpleVal);
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001255 Res.Offset = Offset;
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001256 return Res;
1257 }
Chris Lattner93236ba2009-12-06 04:54:31 +00001258
1259 static AvailableValueInBlock getMI(BasicBlock *BB, MemIntrinsic *MI,
1260 unsigned Offset = 0) {
1261 AvailableValueInBlock Res;
1262 Res.BB = BB;
1263 Res.Val.setPointer(MI);
1264 Res.Val.setInt(MemIntrin);
1265 Res.Offset = Offset;
1266 return Res;
1267 }
1268
1269 bool isSimpleValue() const { return Val.getInt() == SimpleVal; }
1270 Value *getSimpleValue() const {
1271 assert(isSimpleValue() && "Wrong accessor");
1272 return Val.getPointer();
1273 }
1274
1275 MemIntrinsic *getMemIntrinValue() const {
1276 assert(!isSimpleValue() && "Wrong accessor");
1277 return cast<MemIntrinsic>(Val.getPointer());
1278 }
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001279};
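// Typical usage (illustrative): a store that provides the bits is recorded as
// AvailableValueInBlock::get(BB, SI->getOperand(0), Offset), while a clobbering
// memset/memcpy is recorded with getMI(BB, MI, Offset); isSimpleValue()
// distinguishes the two when the value is materialized later.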
1280
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001281/// ConstructSSAForLoadSet - Given a set of available values specified by
1282/// ValuesPerBlock, construct SSA form, allowing us to eliminate LI. This
1283/// returns the value that should be used at LI's definition site.
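/// For example (illustrative names): if LI's block has two predecessors and
/// ValuesPerBlock records %v1 as available in the first and %v2 in the second,
/// this builds roughly
///   %merged = phi i32 [ %v1, %pred1 ], [ %v2, %pred2 ]
/// in LI's block and returns %merged.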
1284static Value *ConstructSSAForLoadSet(LoadInst *LI,
1285 SmallVectorImpl<AvailableValueInBlock> &ValuesPerBlock,
1286 const TargetData *TD,
1287 AliasAnalysis *AA) {
1288 SmallVector<PHINode*, 8> NewPHIs;
1289 SSAUpdater SSAUpdate(&NewPHIs);
1290 SSAUpdate.Initialize(LI);
1291
1292 const Type *LoadTy = LI->getType();
1293
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001294 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
Chris Lattner93236ba2009-12-06 04:54:31 +00001295 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1296 BasicBlock *BB = AV.BB;
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001297
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001298 if (SSAUpdate.HasValueForBlock(BB))
1299 continue;
Chris Lattner93236ba2009-12-06 04:54:31 +00001300
1301 unsigned Offset = AV.Offset;
1302
1303 Value *AvailableVal;
1304 if (AV.isSimpleValue()) {
1305 AvailableVal = AV.getSimpleValue();
1306 if (AvailableVal->getType() != LoadTy) {
1307 assert(TD && "Need target data to handle type mismatch case");
1308 AvailableVal = GetStoreValueForLoad(AvailableVal, Offset, LoadTy,
1309 BB->getTerminator(), *TD);
1310
1311 DEBUG(errs() << "GVN COERCED NONLOCAL VAL:\nOffset: " << Offset << " "
1312 << *AV.getSimpleValue() << '\n'
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001313 << *AvailableVal << '\n' << "\n\n\n");
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001314 }
Chris Lattner93236ba2009-12-06 04:54:31 +00001315 } else {
1316 AvailableVal = GetMemInstValueForLoad(AV.getMemIntrinValue(), Offset,
1317 LoadTy, BB->getTerminator(), *TD);
1318 DEBUG(errs() << "GVN COERCED NONLOCAL MEM INTRIN:\nOffset: " << Offset
1319 << " " << *AV.getMemIntrinValue() << '\n'
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001320 << *AvailableVal << '\n' << "\n\n\n");
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001321 }
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001322 SSAUpdate.AddAvailableValue(BB, AvailableVal);
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001323 }
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001324
1325 // Perform PHI construction.
1326 Value *V = SSAUpdate.GetValueInMiddleOfBlock(LI->getParent());
1327
1328 // If new PHI nodes were created, notify alias analysis.
1329 if (isa<PointerType>(V->getType()))
1330 for (unsigned i = 0, e = NewPHIs.size(); i != e; ++i)
1331 AA->copyValue(LI, NewPHIs[i]);
1332
1333 return V;
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001334}
1335
Owen Andersonb9878ee2009-12-02 07:35:19 +00001336static bool isLifetimeStart(Instruction *Inst) {
Chris Lattnerc4680252009-12-02 06:44:58 +00001337 if (IntrinsicInst* II = dyn_cast<IntrinsicInst>(Inst))
Owen Andersonb9878ee2009-12-02 07:35:19 +00001338 return II->getIntrinsicID() == Intrinsic::lifetime_start;
Chris Lattnerc4680252009-12-02 06:44:58 +00001339 return false;
1340}
1341
Owen Anderson221a4362007-08-16 22:02:55 +00001342/// processNonLocalLoad - Attempt to eliminate a load whose dependencies are
1343/// non-local by performing PHI construction.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001344bool GVN::processNonLocalLoad(LoadInst *LI,
Chris Lattner804209d2008-03-21 22:01:16 +00001345 SmallVectorImpl<Instruction*> &toErase) {
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001346 // Find the non-local dependencies of the load.
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001347 SmallVector<MemoryDependenceAnalysis::NonLocalDepEntry, 64> Deps;
Chris Lattnerb6fc4b82008-12-09 19:25:07 +00001348 MD->getNonLocalPointerDependency(LI->getOperand(0), true, LI->getParent(),
1349 Deps);
Dan Gohmanef3ef7f2009-07-31 20:24:18 +00001350 //DEBUG(errs() << "INVESTIGATING NONLOCAL LOAD: "
1351 // << Deps.size() << *LI << '\n');
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001352
Owen Andersonb39e0de2008-08-26 22:07:42 +00001353 // If we had to process more than one hundred blocks to find the
1354 // dependencies, this load isn't worth worrying about. Optimizing
1355 // it will be too expensive.
Chris Lattnerb6fc4b82008-12-09 19:25:07 +00001356 if (Deps.size() > 100)
Owen Andersonb39e0de2008-08-26 22:07:42 +00001357 return false;
Chris Lattnerb6372932008-12-18 00:51:32 +00001358
1359 // If we had a phi translation failure, we'll have a single entry which is a
1360 // clobber in the current block. Reject this early.
Torok Edwinba93ea72009-06-17 18:48:18 +00001361 if (Deps.size() == 1 && Deps[0].second.isClobber()) {
1362 DEBUG(
Dan Gohman1ddf98a2009-07-25 01:43:01 +00001363 errs() << "GVN: non-local load ";
1364 WriteAsOperand(errs(), LI);
Dan Gohmanef3ef7f2009-07-31 20:24:18 +00001365 errs() << " is clobbered by " << *Deps[0].second.getInst() << '\n';
Torok Edwinba93ea72009-06-17 18:48:18 +00001366 );
Chris Lattnerb6372932008-12-18 00:51:32 +00001367 return false;
Torok Edwinba93ea72009-06-17 18:48:18 +00001368 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001369
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001370 // Filter out useless results (non-locals, etc). Keep track of the blocks
1371 // where we have a value available in ValuesPerBlock, and also whether we see
1372 // dependencies that produce an unknown value for the load (such as a call
1373 // that could potentially clobber the load).
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001374 SmallVector<AvailableValueInBlock, 16> ValuesPerBlock;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001375 SmallVector<BasicBlock*, 16> UnavailableBlocks;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001376
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001377 const TargetData *TD = 0;
1378
Chris Lattnerb6fc4b82008-12-09 19:25:07 +00001379 for (unsigned i = 0, e = Deps.size(); i != e; ++i) {
1380 BasicBlock *DepBB = Deps[i].first;
1381 MemDepResult DepInfo = Deps[i].second;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001382
Chris Lattner0e3d6332008-12-05 21:04:20 +00001383 if (DepInfo.isClobber()) {
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001384 // If the dependence is to a store that writes to a superset of the bits
1385 // read by the load, we can extract the bits we need for the load from the
1386 // stored value.
1387 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInfo.getInst())) {
1388 if (TD == 0)
1389 TD = getAnalysisIfAvailable<TargetData>();
1390 if (TD) {
1391 int Offset = AnalyzeLoadFromClobberingStore(LI, DepSI, *TD);
1392 if (Offset != -1) {
1393 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1394 DepSI->getOperand(0),
1395 Offset));
1396 continue;
1397 }
1398 }
1399 }
Chris Lattner42376062009-12-06 01:57:02 +00001400
Chris Lattner42376062009-12-06 01:57:02 +00001401 // If the clobbering value is a memset/memcpy/memmove, see if we can
1402 // forward a value on from it.
Chris Lattner93236ba2009-12-06 04:54:31 +00001403 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(DepInfo.getInst())) {
Chris Lattner42376062009-12-06 01:57:02 +00001404 if (TD == 0)
1405 TD = getAnalysisIfAvailable<TargetData>();
1406 if (TD) {
Chris Lattner93236ba2009-12-06 04:54:31 +00001407 int Offset = AnalyzeLoadFromClobberingMemInst(LI, DepMI, *TD);
1408 if (Offset != -1) {
1409 ValuesPerBlock.push_back(AvailableValueInBlock::getMI(DepBB, DepMI,
1410 Offset));
1411 continue;
1412 }
Chris Lattner42376062009-12-06 01:57:02 +00001413 }
1414 }
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001415
Chris Lattner0e3d6332008-12-05 21:04:20 +00001416 UnavailableBlocks.push_back(DepBB);
1417 continue;
1418 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001419
Chris Lattner0e3d6332008-12-05 21:04:20 +00001420 Instruction *DepInst = DepInfo.getInst();
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001421
Chris Lattner0e3d6332008-12-05 21:04:20 +00001422 // Loading the allocation -> undef.
Chris Lattnerc4680252009-12-02 06:44:58 +00001423 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst) ||
Owen Andersonb9878ee2009-12-02 07:35:19 +00001424 // Loading immediately after lifetime begin -> undef.
1425 isLifetimeStart(DepInst)) {
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001426 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1427 UndefValue::get(LI->getType())));
Chris Lattner7e61daf2008-12-01 01:15:42 +00001428 continue;
1429 }
Owen Anderson2b2bd282009-10-28 07:05:35 +00001430
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001431 if (StoreInst *S = dyn_cast<StoreInst>(DepInst)) {
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001432 // Reject loads and stores that are to the same address but are of
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001433 // different types if we have to.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001434 if (S->getOperand(0)->getType() != LI->getType()) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001435 if (TD == 0)
1436 TD = getAnalysisIfAvailable<TargetData>();
1437
1438 // If the stored value is larger than or equal to the loaded value, we can
1439 // reuse it.
Chris Lattner9045f232009-09-21 17:24:04 +00001440 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(S->getOperand(0),
1441 LI->getType(), *TD)) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001442 UnavailableBlocks.push_back(DepBB);
1443 continue;
1444 }
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001445 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001446
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001447 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB,
1448 S->getOperand(0)));
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001449 continue;
1450 }
1451
1452 if (LoadInst *LD = dyn_cast<LoadInst>(DepInst)) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001453 // If the types mismatch and we can't handle it, reject reuse of the load.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001454 if (LD->getType() != LI->getType()) {
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001455 if (TD == 0)
1456 TD = getAnalysisIfAvailable<TargetData>();
1457
1458 // If the previously loaded value is larger than or equal to this one, we can
1459 // reuse it.
Chris Lattner9045f232009-09-21 17:24:04 +00001460 if (TD == 0 || !CanCoerceMustAliasedValueToLoad(LD, LI->getType(),*TD)){
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001461 UnavailableBlocks.push_back(DepBB);
1462 continue;
1463 }
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001464 }
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001465 ValuesPerBlock.push_back(AvailableValueInBlock::get(DepBB, LD));
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001466 continue;
Owen Anderson5e5599b2007-07-25 19:57:03 +00001467 }
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001468
1469 UnavailableBlocks.push_back(DepBB);
1470 continue;
Chris Lattner2876a642008-03-21 21:14:38 +00001471 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001472
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001473 // If we have no predecessors that produce a known value for this load, exit
1474 // early.
1475 if (ValuesPerBlock.empty()) return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001476
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001477 // If all of the instructions we depend on produce a known value for this
1478 // load, then it is fully redundant and we can use PHI insertion to compute
1479 // its value. Insert PHIs and remove the fully redundant value now.
1480 if (UnavailableBlocks.empty()) {
Dan Gohmanef3ef7f2009-07-31 20:24:18 +00001481 DEBUG(errs() << "GVN REMOVING NONLOCAL LOAD: " << *LI << '\n');
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001482
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001483 // Perform PHI construction.
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001484 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD,
1485 VN.getAliasAnalysis());
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001486 LI->replaceAllUsesWith(V);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001487
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001488 if (isa<PHINode>(V))
1489 V->takeName(LI);
1490 if (isa<PointerType>(V->getType()))
1491 MD->invalidateCachedPointerInfo(V);
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001492 toErase.push_back(LI);
1493 NumGVNLoad++;
1494 return true;
1495 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001496
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001497 if (!EnablePRE || !EnableLoadPRE)
1498 return false;
1499
1500 // Okay, we have *some* definitions of the value. This means that the value
1501 // is available in some of our (transitive) predecessors. Let's think about
1502 // doing PRE of this load. This will involve inserting a new load into the
1503 // predecessor when it's not available. We could do this in general, but
1504 // prefer to not increase code size. As such, we only do this when we know
1505 // that we only have to insert *one* load (which means we're basically moving
1506 // the load, not inserting a new one).
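// For example (illustrative CFG): if LoadBB has predecessors Pred1 and Pred2
// and the loaded value is known in Pred1 but not in Pred2, we can insert a
// reload in Pred2 and merge it with Pred1's value via a phi, making the
// original load in LoadBB fully redundant.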
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001507
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001508 SmallPtrSet<BasicBlock *, 4> Blockers;
1509 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1510 Blockers.insert(UnavailableBlocks[i]);
1511
1512 // Let's find the first basic block with more than one predecessor. Walk backwards
1513 // through predecessors if needed.
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001514 BasicBlock *LoadBB = LI->getParent();
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001515 BasicBlock *TmpBB = LoadBB;
1516
1517 bool isSinglePred = false;
Dale Johannesen81b64632009-06-17 20:48:23 +00001518 bool allSingleSucc = true;
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001519 while (TmpBB->getSinglePredecessor()) {
1520 isSinglePred = true;
1521 TmpBB = TmpBB->getSinglePredecessor();
1522 if (!TmpBB) // If we haven't found any, bail now.
1523 return false;
1524 if (TmpBB == LoadBB) // Infinite (unreachable) loop.
1525 return false;
1526 if (Blockers.count(TmpBB))
1527 return false;
Dale Johannesen81b64632009-06-17 20:48:23 +00001528 if (TmpBB->getTerminator()->getNumSuccessors() != 1)
1529 allSingleSucc = false;
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001530 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001531
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001532 assert(TmpBB);
1533 LoadBB = TmpBB;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001534
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001535 // If the set of available values contains LI itself, this means we have a loop where
1536 // at least one of the values is LI. Since this means that we won't be able
1537 // to eliminate LI even if we insert uses in the other predecessors, we will
1538 // end up increasing code size. Reject this by scanning for LI.
1539 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i)
Chris Lattner93236ba2009-12-06 04:54:31 +00001540 if (ValuesPerBlock[i].isSimpleValue() &&
1541 ValuesPerBlock[i].getSimpleValue() == LI)
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001542 return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001543
Chris Lattner93236ba2009-12-06 04:54:31 +00001544 // FIXME: It is extremely unclear what this loop is doing, other than
1545 // artificially restricting loadpre.
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001546 if (isSinglePred) {
1547 bool isHot = false;
Chris Lattner93236ba2009-12-06 04:54:31 +00001548 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i) {
1549 const AvailableValueInBlock &AV = ValuesPerBlock[i];
1550 if (AV.isSimpleValue())
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001551 // "Hot" Instruction is in some loop (because it dominates its dep.
1552 // instruction).
Chris Lattner93236ba2009-12-06 04:54:31 +00001553 if (Instruction *I = dyn_cast<Instruction>(AV.getSimpleValue()))
1554 if (DT->dominates(LI, I)) {
1555 isHot = true;
1556 break;
1557 }
1558 }
Owen Andersoncc0c75c2009-05-31 09:03:40 +00001559
1560 // We are interested only in "hot" instructions. We don't want to do any
1561 // mis-optimizations here.
1562 if (!isHot)
1563 return false;
1564 }
1565
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001566 // Okay, we have some hope :). Check to see if the loaded value is fully
1567 // available in all but one predecessor.
1568 // FIXME: If we could restructure the CFG, we could make a common pred with
1569 // all the preds that don't have an available LI and insert a new load into
1570 // that one block.
1571 BasicBlock *UnavailablePred = 0;
1572
Chris Lattnerd2a653a2008-12-05 07:49:08 +00001573 DenseMap<BasicBlock*, char> FullyAvailableBlocks;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001574 for (unsigned i = 0, e = ValuesPerBlock.size(); i != e; ++i)
Chris Lattner0cdc17e2009-09-21 06:30:24 +00001575 FullyAvailableBlocks[ValuesPerBlock[i].BB] = true;
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001576 for (unsigned i = 0, e = UnavailableBlocks.size(); i != e; ++i)
1577 FullyAvailableBlocks[UnavailableBlocks[i]] = false;
1578
1579 for (pred_iterator PI = pred_begin(LoadBB), E = pred_end(LoadBB);
1580 PI != E; ++PI) {
1581 if (IsValueFullyAvailableInBlock(*PI, FullyAvailableBlocks))
1582 continue;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001583
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001584 // If this load is not available in multiple predecessors, reject it.
1585 if (UnavailablePred && UnavailablePred != *PI)
1586 return false;
1587 UnavailablePred = *PI;
1588 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001589
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001590 assert(UnavailablePred != 0 &&
1591 "Fully available value should be eliminated above!");
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001592
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001593 // We don't currently handle critical edges :(
1594 if (UnavailablePred->getTerminator()->getNumSuccessors() != 1) {
Daniel Dunbar0dd5e1e2009-07-25 00:23:56 +00001595 DEBUG(errs() << "COULD NOT PRE LOAD BECAUSE OF CRITICAL EDGE '"
Dan Gohmanef3ef7f2009-07-31 20:24:18 +00001596 << UnavailablePred->getName() << "': " << *LI << '\n');
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001597 return false;
Owen Anderson0cc1a762007-08-07 23:12:31 +00001598 }
Chris Lattner25be93d2009-11-27 08:25:10 +00001599
Chris Lattner2be52e72009-11-27 22:05:15 +00001600 // Do PHI translation to get its value in the predecessor if necessary. The
1601 // returned pointer (if non-null) is guaranteed to dominate UnavailablePred.
1602 //
Chris Lattner44da5bd2009-11-28 15:39:14 +00001603 SmallVector<Instruction*, 8> NewInsts;
Chris Lattnercf0b1982009-11-27 22:50:07 +00001604
Chris Lattner32140312009-11-28 16:08:18 +00001605 // If all preds have a single successor, then we know it is safe to insert the
1606 // load on the pred (?!?), so we can insert code to materialize the pointer if
1607 // it is not available.
Chris Lattner972e6d82009-12-09 01:59:31 +00001608 PHITransAddr Address(LI->getOperand(0), TD);
1609 Value *LoadPtr = 0;
Chris Lattner32140312009-11-28 16:08:18 +00001610 if (allSingleSucc) {
Chris Lattner972e6d82009-12-09 01:59:31 +00001611 LoadPtr = Address.PHITranslateWithInsertion(LoadBB, UnavailablePred,
1612 *DT, NewInsts);
Chris Lattner32140312009-11-28 16:08:18 +00001613 } else {
Chris Lattner972e6d82009-12-09 01:59:31 +00001614 Address.PHITranslateValue(LoadBB, UnavailablePred);
1615 LoadPtr = Address.getAddr();
1616
1617 // Make sure the value is live in the predecessor.
1618 if (Instruction *Inst = dyn_cast_or_null<Instruction>(LoadPtr))
1619 if (!DT->dominates(Inst->getParent(), UnavailablePred))
1620 LoadPtr = 0;
1621 }
1622
1623 // If we couldn't find or insert a computation of this phi translated value,
1624 // we fail PRE.
1625 if (LoadPtr == 0) {
1626 assert(NewInsts.empty() && "Shouldn't insert insts on failure");
1627 DEBUG(errs() << "COULDN'T INSERT PHI TRANSLATED VALUE OF: "
1628 << *LI->getOperand(0) << "\n");
1629 return false;
Chris Lattner32140312009-11-28 16:08:18 +00001630 }
Owen Anderson0b6e2602009-12-03 03:43:29 +00001631
1632 // Assign value numbers to these new instructions.
Chris Lattner972e6d82009-12-09 01:59:31 +00001633 for (unsigned i = 0, e = NewInsts.size(); i != e; ++i) {
Owen Anderson0b6e2602009-12-03 03:43:29 +00001634 // FIXME: We really _ought_ to insert these value numbers into their
1635 // parent's availability map. However, in doing so, we risk getting into
1636 // ordering issues. If a block hasn't been processed yet, we would be
1637 // marking a value as AVAIL-IN, which isn't what we intend.
Chris Lattner972e6d82009-12-09 01:59:31 +00001638 VN.lookup_or_add(NewInsts[i]);
Chris Lattner25be93d2009-11-27 08:25:10 +00001639 }
1640
Dale Johannesen81b64632009-06-17 20:48:23 +00001641 // Make sure it is valid to move this load here. We have to watch out for:
1642 // @1 = getelementptr (i8* p, ...
1643 // test p and branch if == 0
1644 // load @1
1645 // It is valid to have the getelementptr before the test, even if p can be 0,
1646 // as getelementptr only does address arithmetic.
1647 // If we are not pushing the value through any multiple-successor blocks
1648 // we do not have this case. Otherwise, check that the load is safe to
1649 // put anywhere; this can be improved, but should be conservatively safe.
1650 if (!allSingleSucc &&
Chris Lattner44da5bd2009-11-28 15:39:14 +00001651 // FIXME: REEVALUATE THIS.
Chris Lattner32140312009-11-28 16:08:18 +00001652 !isSafeToLoadUnconditionally(LoadPtr, UnavailablePred->getTerminator())) {
1653 assert(NewInsts.empty() && "Should not have inserted instructions");
Dale Johannesen81b64632009-06-17 20:48:23 +00001654 return false;
Chris Lattner32140312009-11-28 16:08:18 +00001655 }
Dale Johannesen81b64632009-06-17 20:48:23 +00001656
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001657 // Okay, we can eliminate this load by inserting a reload in the predecessor
1658 // and using PHI construction to get the value in the other predecessors, do
1659 // it.
Dan Gohmanef3ef7f2009-07-31 20:24:18 +00001660 DEBUG(errs() << "GVN REMOVING PRE LOAD: " << *LI << '\n');
Chris Lattner32140312009-11-28 16:08:18 +00001661 DEBUG(if (!NewInsts.empty())
1662 errs() << "INSERTED " << NewInsts.size() << " INSTS: "
1663 << *NewInsts.back() << '\n');
1664
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001665 Value *NewLoad = new LoadInst(LoadPtr, LI->getName()+".pre", false,
1666 LI->getAlignment(),
1667 UnavailablePred->getTerminator());
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001668
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001669 // Add the newly created load.
1670 ValuesPerBlock.push_back(AvailableValueInBlock::get(UnavailablePred,NewLoad));
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001671
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001672 // Perform PHI construction.
Chris Lattnerb6c65fa2009-10-10 23:50:30 +00001673 Value *V = ConstructSSAForLoadSet(LI, ValuesPerBlock, TD,
1674 VN.getAliasAnalysis());
Chris Lattnera0aa8fb2009-09-20 20:09:34 +00001675 LI->replaceAllUsesWith(V);
1676 if (isa<PHINode>(V))
1677 V->takeName(LI);
1678 if (isa<PointerType>(V->getType()))
1679 MD->invalidateCachedPointerInfo(V);
Chris Lattner1db9bbe2008-12-02 08:16:11 +00001680 toErase.push_back(LI);
1681 NumPRELoad++;
Owen Anderson5e5599b2007-07-25 19:57:03 +00001682 return true;
1683}
1684
Owen Anderson221a4362007-08-16 22:02:55 +00001685/// processLoad - Attempt to eliminate a load, first by eliminating it
1686/// locally, and then attempting non-local elimination if that fails.
Chris Lattner0e3d6332008-12-05 21:04:20 +00001687bool GVN::processLoad(LoadInst *L, SmallVectorImpl<Instruction*> &toErase) {
Dan Gohman81132462009-11-14 02:27:51 +00001688 if (!MD)
1689 return false;
1690
Chris Lattner0e3d6332008-12-05 21:04:20 +00001691 if (L->isVolatile())
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001692 return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001693
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001694 // ... to a pointer that has been loaded from before...
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001695 MemDepResult Dep = MD->getDependency(L);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001696
Chris Lattner0e3d6332008-12-05 21:04:20 +00001697 // If the value isn't available, don't do anything!
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001698 if (Dep.isClobber()) {
Chris Lattner0a9616d2009-09-21 05:57:11 +00001699 // Check to see if we have something like this:
Chris Lattner1dd48c32009-09-20 19:03:47 +00001700 // store i32 123, i32* %P
1701 // %A = bitcast i32* %P to i8*
1702 // %B = gep i8* %A, i32 1
1703 // %C = load i8* %B
1704 //
1705 // We could do that by recognizing if the clobber instructions are obviously
1706 // a common base + constant offset, and if the previous store (or memset)
1707 // completely covers this load. This sort of thing can happen in bitfield
1708 // access code.
Chris Lattner42376062009-12-06 01:57:02 +00001709 Value *AvailVal = 0;
Chris Lattner0a9616d2009-09-21 05:57:11 +00001710 if (StoreInst *DepSI = dyn_cast<StoreInst>(Dep.getInst()))
Chris Lattner9d7fb292009-09-21 06:22:46 +00001711 if (const TargetData *TD = getAnalysisIfAvailable<TargetData>()) {
Chris Lattner4d8af2f2009-09-21 06:48:08 +00001712 int Offset = AnalyzeLoadFromClobberingStore(L, DepSI, *TD);
Chris Lattner42376062009-12-06 01:57:02 +00001713 if (Offset != -1)
1714 AvailVal = GetStoreValueForLoad(DepSI->getOperand(0), Offset,
1715 L->getType(), L, *TD);
Chris Lattner9d7fb292009-09-21 06:22:46 +00001716 }
Chris Lattner0a9616d2009-09-21 05:57:11 +00001717
Chris Lattner42376062009-12-06 01:57:02 +00001718 // If the clobbering value is a memset/memcpy/memmove, see if we can forward
1719 // a value on from it.
1720 if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(Dep.getInst())) {
1721 if (const TargetData *TD = getAnalysisIfAvailable<TargetData>()) {
1722 int Offset = AnalyzeLoadFromClobberingMemInst(L, DepMI, *TD);
1723 if (Offset != -1)
1724 AvailVal = GetMemInstValueForLoad(DepMI, Offset, L->getType(), L,*TD);
1725 }
1726 }
1727
1728 if (AvailVal) {
1729 DEBUG(errs() << "GVN COERCED INST:\n" << *Dep.getInst() << '\n'
1730 << *AvailVal << '\n' << *L << "\n\n\n");
1731
1732 // Replace the load!
1733 L->replaceAllUsesWith(AvailVal);
1734 if (isa<PointerType>(AvailVal->getType()))
1735 MD->invalidateCachedPointerInfo(AvailVal);
1736 toErase.push_back(L);
1737 NumGVNLoad++;
1738 return true;
1739 }
1740
Torok Edwin72070282009-05-29 09:46:03 +00001741 DEBUG(
1742 // fast print dep, using operator<< on instruction would be too slow
Dan Gohman1ddf98a2009-07-25 01:43:01 +00001743 errs() << "GVN: load ";
1744 WriteAsOperand(errs(), L);
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001745 Instruction *I = Dep.getInst();
Dan Gohmanef3ef7f2009-07-31 20:24:18 +00001746 errs() << " is clobbered by " << *I << '\n';
Torok Edwin72070282009-05-29 09:46:03 +00001747 );
Chris Lattner0e3d6332008-12-05 21:04:20 +00001748 return false;
Torok Edwin72070282009-05-29 09:46:03 +00001749 }
Chris Lattner0e3d6332008-12-05 21:04:20 +00001750
1751 // If it is defined in another block, try harder.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001752 if (Dep.isNonLocal())
Chris Lattner0e3d6332008-12-05 21:04:20 +00001753 return processNonLocalLoad(L, toErase);
Eli Friedman716c10c2008-02-12 12:08:14 +00001754
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001755 Instruction *DepInst = Dep.getInst();
Chris Lattner0e3d6332008-12-05 21:04:20 +00001756 if (StoreInst *DepSI = dyn_cast<StoreInst>(DepInst)) {
Chris Lattner1dd48c32009-09-20 19:03:47 +00001757 Value *StoredVal = DepSI->getOperand(0);
1758
1759 // The store and load are to must-aliased pointers, but they may not
1760 // actually have the same type. See if we know how to reuse the stored
1761 // value (depending on its type).
1762 const TargetData *TD = 0;
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001763 if (StoredVal->getType() != L->getType()) {
1764 if ((TD = getAnalysisIfAvailable<TargetData>())) {
1765 StoredVal = CoerceAvailableValueToLoadType(StoredVal, L->getType(),
1766 L, *TD);
1767 if (StoredVal == 0)
1768 return false;
1769
1770 DEBUG(errs() << "GVN COERCED STORE:\n" << *DepSI << '\n' << *StoredVal
1771 << '\n' << *L << "\n\n\n");
1772 }
1773 else
Chris Lattner1dd48c32009-09-20 19:03:47 +00001774 return false;
Chris Lattner1dd48c32009-09-20 19:03:47 +00001775 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001776
Chris Lattner0e3d6332008-12-05 21:04:20 +00001777 // Remove it!
Chris Lattner1dd48c32009-09-20 19:03:47 +00001778 L->replaceAllUsesWith(StoredVal);
1779 if (isa<PointerType>(StoredVal->getType()))
1780 MD->invalidateCachedPointerInfo(StoredVal);
Chris Lattner0e3d6332008-12-05 21:04:20 +00001781 toErase.push_back(L);
1782 NumGVNLoad++;
1783 return true;
1784 }
1785
1786 if (LoadInst *DepLI = dyn_cast<LoadInst>(DepInst)) {
Chris Lattner1dd48c32009-09-20 19:03:47 +00001787 Value *AvailableVal = DepLI;
1788
1789 // The loads are of must-aliased pointers, but they may not actually have
1790 // the same type. See if we know how to reuse the previously loaded value
1791 // (depending on its type).
1792 const TargetData *TD = 0;
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001793 if (DepLI->getType() != L->getType()) {
1794 if ((TD = getAnalysisIfAvailable<TargetData>())) {
1795 AvailableVal = CoerceAvailableValueToLoadType(DepLI, L->getType(), L,*TD);
1796 if (AvailableVal == 0)
1797 return false;
Chris Lattner1dd48c32009-09-20 19:03:47 +00001798
Chris Lattner8ed7bef2009-10-21 04:11:19 +00001799 DEBUG(errs() << "GVN COERCED LOAD:\n" << *DepLI << "\n" << *AvailableVal
1800 << "\n" << *L << "\n\n\n");
1801 }
1802 else
1803 return false;
Chris Lattner1dd48c32009-09-20 19:03:47 +00001804 }
1805
Chris Lattner0e3d6332008-12-05 21:04:20 +00001806 // Remove it!
Chris Lattner1dd48c32009-09-20 19:03:47 +00001807 L->replaceAllUsesWith(AvailableVal);
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00001808 if (isa<PointerType>(DepLI->getType()))
1809 MD->invalidateCachedPointerInfo(DepLI);
Chris Lattner0e3d6332008-12-05 21:04:20 +00001810 toErase.push_back(L);
1811 NumGVNLoad++;
1812 return true;
1813 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001814
Chris Lattner3ff6d012008-11-30 01:39:32 +00001815 // If this load really doesn't depend on anything, then we must be loading an
1816 // undef value. This can happen when loading from a fresh allocation with no
1817 // intervening stores, for example.
Victor Hernandez8acf2952009-10-23 21:09:37 +00001818 if (isa<AllocaInst>(DepInst) || isMalloc(DepInst)) {
Owen Andersonb292b8c2009-07-30 23:03:37 +00001819 L->replaceAllUsesWith(UndefValue::get(L->getType()));
Chris Lattner3ff6d012008-11-30 01:39:32 +00001820 toErase.push_back(L);
Chris Lattner3ff6d012008-11-30 01:39:32 +00001821 NumGVNLoad++;
Chris Lattner0e3d6332008-12-05 21:04:20 +00001822 return true;
Eli Friedman716c10c2008-02-12 12:08:14 +00001823 }
Owen Anderson2b2bd282009-10-28 07:05:35 +00001824
Owen Andersonb9878ee2009-12-02 07:35:19 +00001825 // If this load occurs right after a lifetime begin,
Owen Anderson2b2bd282009-10-28 07:05:35 +00001826 // then the loaded value is undefined.
1827 if (IntrinsicInst* II = dyn_cast<IntrinsicInst>(DepInst)) {
Owen Andersonb9878ee2009-12-02 07:35:19 +00001828 if (II->getIntrinsicID() == Intrinsic::lifetime_start) {
Owen Anderson2b2bd282009-10-28 07:05:35 +00001829 L->replaceAllUsesWith(UndefValue::get(L->getType()));
1830 toErase.push_back(L);
1831 NumGVNLoad++;
1832 return true;
1833 }
1834 }
Eli Friedman716c10c2008-02-12 12:08:14 +00001835
Chris Lattner0e3d6332008-12-05 21:04:20 +00001836 return false;
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001837}
1838
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001839Value *GVN::lookupNumber(BasicBlock *BB, uint32_t num) {
Owen Anderson54e02192008-06-23 17:49:45 +00001840 DenseMap<BasicBlock*, ValueNumberScope*>::iterator I = localAvail.find(BB);
1841 if (I == localAvail.end())
1842 return 0;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001843
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001844 ValueNumberScope *Locals = I->second;
1845 while (Locals) {
1846 DenseMap<uint32_t, Value*>::iterator I = Locals->table.find(num);
1847 if (I != Locals->table.end())
Owen Anderson1b3ea962008-06-20 01:15:47 +00001848 return I->second;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001849 Locals = Locals->parent;
Owen Anderson1b3ea962008-06-20 01:15:47 +00001850 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001851
Owen Anderson1b3ea962008-06-20 01:15:47 +00001852 return 0;
1853}
1854
Owen Andersonbfe133e2008-12-15 02:03:00 +00001855
Owen Anderson398602a2007-08-14 18:16:29 +00001856/// processInstruction - When calculating availability, handle an instruction
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001857/// by inserting it into the appropriate sets
Owen Andersonaccdca12008-06-12 19:25:32 +00001858bool GVN::processInstruction(Instruction *I,
Chris Lattner804209d2008-03-21 22:01:16 +00001859 SmallVectorImpl<Instruction*> &toErase) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001860 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
1861 bool Changed = processLoad(LI, toErase);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001862
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001863 if (!Changed) {
1864 unsigned Num = VN.lookup_or_add(LI);
1865 localAvail[I->getParent()]->table.insert(std::make_pair(Num, LI));
Owen Anderson6a903bc2008-06-18 21:41:49 +00001866 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001867
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001868 return Changed;
Owen Anderson6a903bc2008-06-18 21:41:49 +00001869 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001870
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001871 uint32_t NextNum = VN.getNextUnusedValueNumber();
1872 unsigned Num = VN.lookup_or_add(I);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001873
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001874 if (BranchInst *BI = dyn_cast<BranchInst>(I)) {
1875 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001876
Owen Anderson98f912b2009-04-01 23:53:49 +00001877 if (!BI->isConditional() || isa<Constant>(BI->getCondition()))
1878 return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001879
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001880 Value *BranchCond = BI->getCondition();
1881 uint32_t CondVN = VN.lookup_or_add(BranchCond);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001882
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001883 BasicBlock *TrueSucc = BI->getSuccessor(0);
1884 BasicBlock *FalseSucc = BI->getSuccessor(1);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001885
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001886 if (TrueSucc->getSinglePredecessor())
1887 localAvail[TrueSucc]->table[CondVN] =
1888 ConstantInt::getTrue(TrueSucc->getContext());
1889 if (FalseSucc->getSinglePredecessor())
1890 localAvail[FalseSucc]->table[CondVN] =
1891 ConstantInt::getFalse(TrueSucc->getContext());
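// For example (illustrative): given "br i1 %cmp, label %T, label %F" where
// %T and %F have no other predecessors, %cmp's value number maps to true
// inside %T and to false inside %F, so later uses of that number in those
// blocks are folded to a constant.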
Owen Anderson98f912b2009-04-01 23:53:49 +00001892
1893 return false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001894
Owen Anderson0c1e6342008-04-07 09:59:07 +00001895 // Allocations are always uniquely numbered, so we can save time and memory
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001896 // by fast failing them.
Victor Hernandez8acf2952009-10-23 21:09:37 +00001897 } else if (isa<AllocaInst>(I) || isa<TerminatorInst>(I)) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001898 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Anderson0c1e6342008-04-07 09:59:07 +00001899 return false;
Owen Anderson6a903bc2008-06-18 21:41:49 +00001900 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001901
Owen Anderson221a4362007-08-16 22:02:55 +00001902 // Collapse PHI nodes
Owen Andersonbc271a02007-08-14 18:33:27 +00001903 if (PHINode* p = dyn_cast<PHINode>(I)) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001904 Value *constVal = CollapsePhi(p);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001905
Owen Andersonbc271a02007-08-14 18:33:27 +00001906 if (constVal) {
Owen Andersonf5023a72007-08-16 22:51:56 +00001907 p->replaceAllUsesWith(constVal);
Dan Gohman81132462009-11-14 02:27:51 +00001908 if (MD && isa<PointerType>(constVal->getType()))
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00001909 MD->invalidateCachedPointerInfo(constVal);
Owen Anderson164274e2008-12-23 00:49:51 +00001910 VN.erase(p);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001911
Owen Andersonf5023a72007-08-16 22:51:56 +00001912 toErase.push_back(p);
Owen Anderson6a903bc2008-06-18 21:41:49 +00001913 } else {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001914 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Andersonbc271a02007-08-14 18:33:27 +00001915 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001916
Owen Anderson3ea90a72008-07-03 17:44:33 +00001917 // If the number we were assigned was a brand new VN, then we don't
1918 // need to do a lookup to see if the number already exists
1919 // somewhere in the domtree: it can't!
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001920 } else if (Num == NextNum) {
1921 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001922
Owen Andersonbfe133e2008-12-15 02:03:00 +00001923 // Perform fast-path value-number based elimination of values inherited from
1924 // dominators.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001925 } else if (Value *repl = lookupNumber(I->getParent(), Num)) {
Owen Anderson086b2c42007-12-08 01:37:09 +00001926 // Remove it!
Owen Anderson10ffa862007-07-31 23:27:13 +00001927 VN.erase(I);
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001928 I->replaceAllUsesWith(repl);
Dan Gohman81132462009-11-14 02:27:51 +00001929 if (MD && isa<PointerType>(repl->getType()))
Chris Lattnerfa9f99a2008-12-09 22:06:23 +00001930 MD->invalidateCachedPointerInfo(repl);
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001931 toErase.push_back(I);
1932 return true;
Owen Andersonbfe133e2008-12-15 02:03:00 +00001933
Owen Anderson3ea90a72008-07-03 17:44:33 +00001934 } else {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001935 localAvail[I->getParent()]->table.insert(std::make_pair(Num, I));
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001936 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001937
Owen Andersonab6ec2e2007-07-24 17:55:58 +00001938 return false;
1939}
1940
Bill Wendling456e8852008-12-22 22:32:22 +00001941/// runOnFunction - This is the main transformation entry point for a function.
Owen Anderson676070d2007-08-14 18:04:11 +00001942bool GVN::runOnFunction(Function& F) {
Dan Gohman81132462009-11-14 02:27:51 +00001943 if (!NoLoads)
1944 MD = &getAnalysis<MemoryDependenceAnalysis>();
Chris Lattner8541ede2008-12-01 00:40:32 +00001945 DT = &getAnalysis<DominatorTree>();
Owen Andersonf7928602008-05-12 20:15:55 +00001946 VN.setAliasAnalysis(&getAnalysis<AliasAnalysis>());
Chris Lattner8541ede2008-12-01 00:40:32 +00001947 VN.setMemDep(MD);
1948 VN.setDomTree(DT);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001949
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001950 bool Changed = false;
1951 bool ShouldContinue = true;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001952
Owen Andersonac310962008-07-16 17:52:31 +00001953 // Merge unconditional branches, allowing PRE to catch more
1954 // optimization opportunities.
1955 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001956 BasicBlock *BB = FI;
Owen Andersonac310962008-07-16 17:52:31 +00001957 ++FI;
Owen Andersonc0623812008-07-17 00:01:40 +00001958 bool removedBlock = MergeBlockIntoPredecessor(BB, this);
1959 if (removedBlock) NumGVNBlocks++;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001960
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001961 Changed |= removedBlock;
Owen Andersonac310962008-07-16 17:52:31 +00001962 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001963
Chris Lattner0a5a8d52008-12-09 19:21:47 +00001964 unsigned Iteration = 0;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001965
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001966 while (ShouldContinue) {
Dan Gohman1ddf98a2009-07-25 01:43:01 +00001967 DEBUG(errs() << "GVN iteration: " << Iteration << "\n");
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001968 ShouldContinue = iterateOnFunction(F);
1969 Changed |= ShouldContinue;
Chris Lattner0a5a8d52008-12-09 19:21:47 +00001970 ++Iteration;
Owen Anderson676070d2007-08-14 18:04:11 +00001971 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001972
Owen Anderson04a6e0b2008-07-18 18:03:38 +00001973 if (EnablePRE) {
Owen Anderson2fbfb702008-09-03 23:06:07 +00001974 bool PREChanged = true;
1975 while (PREChanged) {
1976 PREChanged = performPRE(F);
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001977 Changed |= PREChanged;
Owen Anderson2fbfb702008-09-03 23:06:07 +00001978 }
Owen Anderson04a6e0b2008-07-18 18:03:38 +00001979 }
Chris Lattner0a5a8d52008-12-09 19:21:47 +00001980 // FIXME: Should perform GVN again after PRE does something. PRE can move
1981 // computations into blocks where they become fully redundant. Note that
1982 // we can't do this until PRE's critical edge splitting updates memdep.
1983 // Actually, when this happens, we should just fully integrate PRE into GVN.
Nuno Lopese3127f32008-10-10 16:25:50 +00001984
1985 cleanupGlobalSets();
1986
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001987 return Changed;
Owen Anderson676070d2007-08-14 18:04:11 +00001988}
1989
1990
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001991bool GVN::processBlock(BasicBlock *BB) {
Chris Lattner0a5a8d52008-12-09 19:21:47 +00001992 // FIXME: Kill off toErase by erasing eagerly in a helper function (and
1993 // incrementing BI before processing an instruction).
Owen Andersonaccdca12008-06-12 19:25:32 +00001994 SmallVector<Instruction*, 8> toErase;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001995 bool ChangedFunction = false;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00001996
Owen Andersonaccdca12008-06-12 19:25:32 +00001997 for (BasicBlock::iterator BI = BB->begin(), BE = BB->end();
1998 BI != BE;) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00001999 ChangedFunction |= processInstruction(BI, toErase);
Owen Andersonaccdca12008-06-12 19:25:32 +00002000 if (toErase.empty()) {
2001 ++BI;
2002 continue;
2003 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002004
Owen Andersonaccdca12008-06-12 19:25:32 +00002005 // If we need some instructions deleted, do it now.
2006 NumGVNInstr += toErase.size();
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002007
Owen Andersonaccdca12008-06-12 19:25:32 +00002008 // Avoid iterator invalidation.
2009 bool AtStart = BI == BB->begin();
2010 if (!AtStart)
2011 --BI;
2012
2013 for (SmallVector<Instruction*, 4>::iterator I = toErase.begin(),
Chris Lattner8541ede2008-12-01 00:40:32 +00002014 E = toErase.end(); I != E; ++I) {
Dan Gohmanef3ef7f2009-07-31 20:24:18 +00002015 DEBUG(errs() << "GVN removed: " << **I << '\n');
Dan Gohman81132462009-11-14 02:27:51 +00002016 if (MD) MD->removeInstruction(*I);
Owen Andersonaccdca12008-06-12 19:25:32 +00002017 (*I)->eraseFromParent();
Bill Wendlingebb6a542008-12-22 21:57:30 +00002018 DEBUG(verifyRemoved(*I));
Chris Lattner8541ede2008-12-01 00:40:32 +00002019 }
Chris Lattner0a5a8d52008-12-09 19:21:47 +00002020 toErase.clear();
Owen Andersonaccdca12008-06-12 19:25:32 +00002021
2022 if (AtStart)
2023 BI = BB->begin();
2024 else
2025 ++BI;
Owen Andersonaccdca12008-06-12 19:25:32 +00002026 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002027
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002028 return ChangedFunction;
Owen Andersonaccdca12008-06-12 19:25:32 +00002029}
2030
Owen Anderson6a903bc2008-06-18 21:41:49 +00002031/// performPRE - Perform a purely local form of PRE that looks for diamond
2032/// control flow patterns and attempts to perform simple PRE at the join point.
Chris Lattnera546dcf2009-10-31 22:11:15 +00002033bool GVN::performPRE(Function &F) {
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002034 bool Changed = false;
Owen Andersonfdf9f162008-06-19 19:54:19 +00002035 SmallVector<std::pair<TerminatorInst*, unsigned>, 4> toSplit;
Chris Lattnerf00aae42008-12-01 07:29:03 +00002036 DenseMap<BasicBlock*, Value*> predMap;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002037 for (df_iterator<BasicBlock*> DI = df_begin(&F.getEntryBlock()),
2038 DE = df_end(&F.getEntryBlock()); DI != DE; ++DI) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002039 BasicBlock *CurrentBlock = *DI;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002040
Owen Anderson6a903bc2008-06-18 21:41:49 +00002041 // Nothing to PRE in the entry block.
2042 if (CurrentBlock == &F.getEntryBlock()) continue;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002043
Owen Anderson6a903bc2008-06-18 21:41:49 +00002044 for (BasicBlock::iterator BI = CurrentBlock->begin(),
2045 BE = CurrentBlock->end(); BI != BE; ) {
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002046 Instruction *CurInst = BI++;
Duncan Sands1efabaa2009-05-06 06:49:50 +00002047
Victor Hernandez8acf2952009-10-23 21:09:37 +00002048 if (isa<AllocaInst>(CurInst) ||
Victor Hernandez5d034492009-09-18 22:35:49 +00002049 isa<TerminatorInst>(CurInst) || isa<PHINode>(CurInst) ||
Devang Patel92f86192009-10-14 17:29:00 +00002050 CurInst->getType()->isVoidTy() ||
Duncan Sands1efabaa2009-05-06 06:49:50 +00002051 CurInst->mayReadFromMemory() || CurInst->mayHaveSideEffects() ||
John Criswell073e4d12009-03-10 15:04:53 +00002052 isa<DbgInfoIntrinsic>(CurInst))
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002053 continue;
Duncan Sands1efabaa2009-05-06 06:49:50 +00002054
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002055 uint32_t ValNo = VN.lookup(CurInst);
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002056
Owen Anderson6a903bc2008-06-18 21:41:49 +00002057 // Look for the predecessors for PRE opportunities. We're
2058 // only trying to solve the basic diamond case, where
2059 // a value is computed in the successor and one predecessor,
2060 // but not the other. We also explicitly disallow cases
2061 // where the successor is its own predecessor, because they're
2062 // more complicated to get right.
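// For example (illustrative): in a diamond where %a = add i32 %x, %y is
// computed in one predecessor of CurrentBlock and again in CurrentBlock
// itself, but not in the other predecessor, we insert a copy of the add
// into the missing predecessor and replace CurrentBlock's computation
// with a phi of the two predecessor values.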
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002063 unsigned NumWith = 0;
2064 unsigned NumWithout = 0;
2065 BasicBlock *PREPred = 0;
Chris Lattnerf00aae42008-12-01 07:29:03 +00002066 predMap.clear();
2067
Owen Anderson6a903bc2008-06-18 21:41:49 +00002068 for (pred_iterator PI = pred_begin(CurrentBlock),
2069 PE = pred_end(CurrentBlock); PI != PE; ++PI) {
2070 // We're not interested in PRE where the block is its
Owen Anderson1b3ea962008-06-20 01:15:47 +00002071 // own predecessor, on in blocks with predecessors
2072 // that are not reachable.
2073 if (*PI == CurrentBlock) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002074 NumWithout = 2;
Owen Anderson1b3ea962008-06-20 01:15:47 +00002075 break;
2076 } else if (!localAvail.count(*PI)) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002077 NumWithout = 2;
Owen Anderson1b3ea962008-06-20 01:15:47 +00002078 break;
2079 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002080
2081 DenseMap<uint32_t, Value*>::iterator predV =
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002082 localAvail[*PI]->table.find(ValNo);
Owen Anderson1b3ea962008-06-20 01:15:47 +00002083 if (predV == localAvail[*PI]->table.end()) {
Owen Anderson6a903bc2008-06-18 21:41:49 +00002084 PREPred = *PI;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002085 NumWithout++;
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002086 } else if (predV->second == CurInst) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002087 NumWithout = 2;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002088 } else {
Owen Anderson1b3ea962008-06-20 01:15:47 +00002089 predMap[*PI] = predV->second;
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002090 NumWith++;
Owen Anderson6a903bc2008-06-18 21:41:49 +00002091 }
2092 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002093
Owen Anderson6a903bc2008-06-18 21:41:49 +00002094 // Don't do PRE when it might increase code size, i.e. when
2095 // we would need to insert instructions in more than one pred.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002096 if (NumWithout != 1 || NumWith == 0)
Owen Anderson6a903bc2008-06-18 21:41:49 +00002097 continue;
Chris Lattnera546dcf2009-10-31 22:11:15 +00002098
2099 // Don't do PRE across indirect branch.
2100 if (isa<IndirectBrInst>(PREPred->getTerminator()))
2101 continue;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002102
Owen Andersonfdf9f162008-06-19 19:54:19 +00002103 // We can't do PRE safely on a critical edge, so instead we schedule
2104 // the edge to be split and perform the PRE the next time we iterate
2105 // on the function.
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002106 unsigned SuccNum = 0;
Owen Andersonfdf9f162008-06-19 19:54:19 +00002107 for (unsigned i = 0, e = PREPred->getTerminator()->getNumSuccessors();
2108 i != e; ++i)
Owen Anderson2fbfb702008-09-03 23:06:07 +00002109 if (PREPred->getTerminator()->getSuccessor(i) == CurrentBlock) {
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002110 SuccNum = i;
Owen Andersonfdf9f162008-06-19 19:54:19 +00002111 break;
2112 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002113
Chris Lattner1eefa9c2009-09-21 02:42:51 +00002114 if (isCriticalEdge(PREPred->getTerminator(), SuccNum)) {
2115 toSplit.push_back(std::make_pair(PREPred->getTerminator(), SuccNum));
Owen Andersonfdf9f162008-06-19 19:54:19 +00002116 continue;
2117 }
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002118
Owen Anderson6a903bc2008-06-18 21:41:49 +00002119 // Instantiate the expression in the predecessor that lacked it.
2120 // Because we are going top-down through the block, all value numbers
2121 // will be available in the predecessor by the time we need them. Any
2122 // that weren't originally present will have been instantiated earlier
2123 // in this loop.
Nick Lewycky42fb7452009-09-27 07:38:41 +00002124 Instruction *PREInstr = CurInst->clone();
Owen Anderson6a903bc2008-06-18 21:41:49 +00002125 bool success = true;
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002126 for (unsigned i = 0, e = CurInst->getNumOperands(); i != e; ++i) {
2127 Value *Op = PREInstr->getOperand(i);
2128 if (isa<Argument>(Op) || isa<Constant>(Op) || isa<GlobalValue>(Op))
2129 continue;
Daniel Dunbar7d6781b2009-09-20 02:20:51 +00002130
Chris Lattner6f5bf6a2008-12-01 07:35:54 +00002131 if (Value *V = lookupNumber(PREPred, VN.lookup(Op))) {
2132 PREInstr->setOperand(i, V);
2133 } else {
2134 success = false;
2135 break;
Owen Anderson8e462e92008-07-11 20:05:13 +00002136 }
Owen Anderson6a903bc2008-06-18 21:41:49 +00002137 }

      // Fail out if we encounter an operand that is not available in
      // the PRE predecessor.  This is typically because of loads which
      // are not value numbered precisely.
      if (!success) {
        DEBUG(verifyRemoved(PREInstr));
        delete PREInstr;
        continue;
      }

      PREInstr->insertBefore(PREPred->getTerminator());
      PREInstr->setName(CurInst->getName() + ".pre");
      predMap[PREPred] = PREInstr;
      VN.add(PREInstr, ValNo);
      NumGVNPRE++;

      // Update the availability map to include the new instruction.
      localAvail[PREPred]->table.insert(std::make_pair(ValNo, PREInstr));
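      // Later PRE candidates in this walk will now find PREInstr when they
      // look up ValNo in PREPred's scope.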

      // Create a PHI to make the value available in this block.
      PHINode* Phi = PHINode::Create(CurInst->getType(),
                                     CurInst->getName() + ".pre-phi",
                                     CurrentBlock->begin());
      for (pred_iterator PI = pred_begin(CurrentBlock),
           PE = pred_end(CurrentBlock); PI != PE; ++PI)
        Phi->addIncoming(predMap[*PI], *PI);
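      // Every predecessor has an entry in predMap by now: the blocks that
      // already had the value contributed their copy above, and PREPred
      // contributes the freshly inserted PREInstr.  For example (names are
      // illustrative only), if %t = add i32 %x, 1 was available in %A but
      // missing in %B, %B now ends with %t.pre = add i32 %x, 1 and this
      // block begins with %t.pre-phi = phi i32 [ %t, %A ], [ %t.pre, %B ].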

      VN.add(Phi, ValNo);
      localAvail[CurrentBlock]->table[ValNo] = Phi;

      CurInst->replaceAllUsesWith(Phi);
      if (MD && isa<PointerType>(Phi->getType()))
        MD->invalidateCachedPointerInfo(Phi);
      VN.erase(CurInst);

      DEBUG(errs() << "GVN PRE removed: " << *CurInst << '\n');
      if (MD) MD->removeInstruction(CurInst);
      CurInst->eraseFromParent();
      DEBUG(verifyRemoved(CurInst));
      Changed = true;
    }
  }

  for (SmallVector<std::pair<TerminatorInst*, unsigned>, 4>::iterator
       I = toSplit.begin(), E = toSplit.end(); I != E; ++I)
    SplitCriticalEdge(I->first, I->second, this);

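  // Report a change whenever edges were queued for splitting, even if no
  // expression was PRE'd this time around, so that PRE runs again once the
  // split blocks are in place.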
  return Changed || toSplit.size();
}

/// iterateOnFunction - Executes one iteration of GVN
bool GVN::iterateOnFunction(Function &F) {
  cleanupGlobalSets();

  for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
       DE = df_end(DT->getRootNode()); DI != DE; ++DI) {
    if (DI->getIDom())
      localAvail[DI->getBlock()] =
                   new ValueNumberScope(localAvail[DI->getIDom()->getBlock()]);
    else
      localAvail[DI->getBlock()] = new ValueNumberScope(0);
  }
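  // Each block's ValueNumberScope is chained to the scope of its immediate
  // dominator, so lookups can walk up the dominator tree until a block that
  // provides the wanted value number is found.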

  // Top-down walk of the dominator tree
  bool Changed = false;
#if 0
  // Needed for value numbering with phi construction to work.
  ReversePostOrderTraversal<Function*> RPOT(&F);
  for (ReversePostOrderTraversal<Function*>::rpo_iterator RI = RPOT.begin(),
       RE = RPOT.end(); RI != RE; ++RI)
    Changed |= processBlock(*RI);
#else
  for (df_iterator<DomTreeNode*> DI = df_begin(DT->getRootNode()),
       DE = df_end(DT->getRootNode()); DI != DE; ++DI)
    Changed |= processBlock(DI->getBlock());
#endif
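  // The dominator-tree walk visits a block only after all of its dominators,
  // so every scope consulted by processBlock has already been filled in.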

  return Changed;
}

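/// cleanupGlobalSets - Clear the value table and free the per-block
/// ValueNumberScopes allocated by iterateOnFunction.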
void GVN::cleanupGlobalSets() {
  VN.clear();

  for (DenseMap<BasicBlock*, ValueNumberScope*>::iterator
       I = localAvail.begin(), E = localAvail.end(); I != E; ++I)
    delete I->second;
  localAvail.clear();
}

/// verifyRemoved - Verify that the specified instruction does not occur in our
/// internal data structures.
void GVN::verifyRemoved(const Instruction *Inst) const {
  VN.verifyRemoved(Inst);

  // Walk through the value number scope to make sure the instruction isn't
  // ferreted away in it.
  for (DenseMap<BasicBlock*, ValueNumberScope*>::const_iterator
         I = localAvail.begin(), E = localAvail.end(); I != E; ++I) {
    const ValueNumberScope *VNS = I->second;

    while (VNS) {
      for (DenseMap<uint32_t, Value*>::const_iterator
             II = VNS->table.begin(), IE = VNS->table.end(); II != IE; ++II) {
        assert(II->second != Inst && "Inst still in value numbering scope!");
      }

      VNS = VNS->parent;
    }
  }
}