//===- MemoryDependenceAnalysis.cpp - Mem Deps Implementation --*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file was developed by Owen Anderson and is distributed under
// the University of Illinois Open Source License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements an analysis that determines, for a given memory
// operation, what preceding memory operations it depends on.  It builds on
// alias analysis information, and tries to provide a lazy, caching interface
// to a common kind of alias information query.
//
//===----------------------------------------------------------------------===//
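//
// Rough usage sketch (illustrative only; "MyPass" and "SomeLoad" are
// hypothetical names, the queried calls are the ones defined below).  A
// client transformation pass would require this analysis and then query
// individual instructions:
//
//   void MyPass::getAnalysisUsage(AnalysisUsage &AU) const {
//     AU.addRequired<MemoryDependenceAnalysis>();
//   }
//   ...
//   MemoryDependenceAnalysis &MD = getAnalysis<MemoryDependenceAnalysis>();
//   Instruction *Dep = MD.getDependency(SomeLoad, 0, true);
//   if (Dep != MemoryDependenceAnalysis::NonLocal &&
//       Dep != MemoryDependenceAnalysis::None) {
//     // Dep is the closest preceding instruction in the same block that
//     // SomeLoad may depend on.
//   }
//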

#include "llvm/Analysis/MemoryDependenceAnalysis.h"
#include "llvm/Constants.h"
#include "llvm/Instructions.h"
#include "llvm/Function.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Target/TargetData.h"

using namespace llvm;

char MemoryDependenceAnalysis::ID = 0;

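// Sentinel values returned by the dependence queries below.  NonLocal means
// no dependence was found within the query's own basic block; None means the
// query instruction is not one we compute dependences for (e.g. an
// allocation).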
Instruction* MemoryDependenceAnalysis::NonLocal = (Instruction*)0;
Instruction* MemoryDependenceAnalysis::None = (Instruction*)~0;

// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
                                                "Memory Dependence Analysis");

/// getAnalysisUsage - Does not modify anything.  It uses Alias Analysis and
/// TargetData.
///
void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
  AU.addRequiredTransitive<AliasAnalysis>();
  AU.addRequiredTransitive<TargetData>();
}

/// getCallSiteDependency - Find the most recent preceding instruction in the
/// same basic block that the memory effects of the given call site may
/// depend on.
Instruction* MemoryDependenceAnalysis::getCallSiteDependency(CallSite C,
                                                             Instruction* start,
                                                             bool local) {
  assert(local && "Non-local memory dependence analysis not yet implemented");

  AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
  TargetData& TD = getAnalysis<TargetData>();
  BasicBlock::iterator blockBegin = C.getInstruction()->getParent()->begin();
  BasicBlock::iterator QI = C.getInstruction();

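  // Walk backwards from the call, looking for the most recent preceding
  // instruction whose memory behavior alias analysis says the call may
  // depend on.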
  while (QI != blockBegin) {
    --QI;

    // If this inst is a memory op, get the pointer it accessed
    Value* pointer = 0;
    uint64_t pointerSize = 0;
    if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
      pointer = S->getPointerOperand();
      pointerSize = TD.getTypeSize(S->getOperand(0)->getType());
    } else if (LoadInst* L = dyn_cast<LoadInst>(QI)) {
      pointer = L->getPointerOperand();
      pointerSize = TD.getTypeSize(L->getType());
    } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
      pointer = AI;
      if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
        pointerSize = C->getZExtValue() *
                      TD.getTypeSize(AI->getAllocatedType());
      else
        pointerSize = ~0UL;
    } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
      pointer = V->getOperand(0);
      pointerSize = TD.getTypeSize(V->getType());
    } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
      pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
      pointerSize = ~0UL;
    } else if (CallSite::get(QI).getInstruction() != 0) {
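      // Call instructions need special handling: ask alias analysis whether
      // the memory behavior of the two calls can interfere at all.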
      if (AA.getModRefInfo(C, CallSite::get(QI)) != AliasAnalysis::NoModRef) {
        depGraphLocal.insert(std::make_pair(C.getInstruction(),
                                            std::make_pair(QI, true)));
        reverseDep.insert(std::make_pair(QI, C.getInstruction()));
        return QI;
      } else {
        continue;
      }
    } else
      continue;

    if (AA.getModRefInfo(C, pointer, pointerSize) != AliasAnalysis::NoModRef) {
      depGraphLocal.insert(std::make_pair(C.getInstruction(),
                                          std::make_pair(QI, true)));
      reverseDep.insert(std::make_pair(QI, C.getInstruction()));
      return QI;
    }
  }

  // No dependence found
  depGraphLocal.insert(std::make_pair(C.getInstruction(),
                                      std::make_pair(NonLocal, true)));
  reverseDep.insert(std::make_pair(NonLocal, C.getInstruction()));
  return NonLocal;
}

/// getDependency - Return the instruction on which a memory operation
/// depends.  The local parameter indicates if the query should only
/// evaluate dependencies within the same basic block.  If start is non-null,
/// the search begins at start rather than at the query instruction.
Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
                                                     Instruction* start,
                                                     bool local) {
  if (!local)
    assert(0 && "Non-local memory dependence is not yet supported.");

  // Start looking for dependencies with the queried inst
  BasicBlock::iterator QI = query;

  // Check for a cached result
  std::pair<Instruction*, bool> cachedResult = depGraphLocal[query];
  // If we have a _confirmed_ cached entry, return it
  if (cachedResult.second)
    return cachedResult.first;
  else if (cachedResult.first != NonLocal)
    // If we have an unconfirmed cached entry, we can start our search from
    // there
    QI = cachedResult.first;

  if (start)
    QI = start;

  AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
  TargetData& TD = getAnalysis<TargetData>();

  // Get the pointer value for which dependence will be determined
  Value* dependee = 0;
  uint64_t dependeeSize = 0;
  bool queryIsVolatile = false;
  if (StoreInst* S = dyn_cast<StoreInst>(query)) {
    dependee = S->getPointerOperand();
    dependeeSize = TD.getTypeSize(S->getOperand(0)->getType());
    queryIsVolatile = S->isVolatile();
  } else if (LoadInst* L = dyn_cast<LoadInst>(query)) {
    dependee = L->getPointerOperand();
    dependeeSize = TD.getTypeSize(L->getType());
    queryIsVolatile = L->isVolatile();
  } else if (VAArgInst* V = dyn_cast<VAArgInst>(query)) {
    dependee = V->getOperand(0);
    dependeeSize = TD.getTypeSize(V->getType());
  } else if (FreeInst* F = dyn_cast<FreeInst>(query)) {
    dependee = F->getPointerOperand();

    // FreeInsts erase the entire structure, not just a field
    dependeeSize = ~0UL;
  } else if (CallSite::get(query).getInstruction() != 0)
    return getCallSiteDependency(CallSite::get(query), start);
  else if (isa<AllocationInst>(query))
    return None;
  else
    return None;

  BasicBlock::iterator blockBegin = query->getParent()->begin();

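  // Walk backwards from the starting point, looking for the most recent
  // preceding instruction whose accessed memory may alias the query's
  // pointer.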
  while (QI != blockBegin) {
    --QI;

    // If this inst is a memory op, get the pointer it accessed
    Value* pointer = 0;
    uint64_t pointerSize = 0;
    if (StoreInst* S = dyn_cast<StoreInst>(QI)) {
      // All volatile loads/stores depend on each other
      if (queryIsVolatile && S->isVolatile()) {
        if (!start) {
          depGraphLocal.insert(std::make_pair(query, std::make_pair(S, true)));
          reverseDep.insert(std::make_pair(S, query));
        }

        return S;
      }

      pointer = S->getPointerOperand();
      pointerSize = TD.getTypeSize(S->getOperand(0)->getType());
    } else if (LoadInst* L = dyn_cast<LoadInst>(QI)) {
      // All volatile loads/stores depend on each other
      if (queryIsVolatile && L->isVolatile()) {
        if (!start) {
          depGraphLocal.insert(std::make_pair(query, std::make_pair(L, true)));
          reverseDep.insert(std::make_pair(L, query));
        }

        return L;
      }

      pointer = L->getPointerOperand();
      pointerSize = TD.getTypeSize(L->getType());
    } else if (AllocationInst* AI = dyn_cast<AllocationInst>(QI)) {
      pointer = AI;
      if (ConstantInt* C = dyn_cast<ConstantInt>(AI->getArraySize()))
        pointerSize = C->getZExtValue() *
                      TD.getTypeSize(AI->getAllocatedType());
      else
        pointerSize = ~0UL;
    } else if (VAArgInst* V = dyn_cast<VAArgInst>(QI)) {
      pointer = V->getOperand(0);
      pointerSize = TD.getTypeSize(V->getType());
    } else if (FreeInst* F = dyn_cast<FreeInst>(QI)) {
      pointer = F->getPointerOperand();

      // FreeInsts erase the entire structure
      pointerSize = ~0UL;
    } else if (CallSite::get(QI).getInstruction() != 0) {
      // Call insts need special handling.  Check if they can modify our
      // pointer.
      if (AA.getModRefInfo(CallSite::get(QI), dependee, dependeeSize) !=
          AliasAnalysis::NoModRef) {
        if (!start) {
          depGraphLocal.insert(std::make_pair(query,
                                              std::make_pair(QI, true)));
          reverseDep.insert(std::make_pair(QI, query));
        }

        return QI;
      } else {
        continue;
      }
    }

    // If we found a pointer, check if it could be the same as our pointer
    if (pointer) {
      AliasAnalysis::AliasResult R = AA.alias(pointer, pointerSize,
                                              dependee, dependeeSize);

      if (R != AliasAnalysis::NoAlias) {
        if (!start) {
          depGraphLocal.insert(std::make_pair(query,
                                              std::make_pair(QI, true)));
          reverseDep.insert(std::make_pair(QI, query));
        }

        return QI;
      }
    }
  }

  // If we found nothing, return the non-local flag
  if (!start) {
    depGraphLocal.insert(std::make_pair(query,
                                        std::make_pair(NonLocal, true)));
    reverseDep.insert(std::make_pair(NonLocal, query));
  }

  return NonLocal;
}

/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
void MemoryDependenceAnalysis::removeInstruction(Instruction* rem) {
  // Figure out the new dep for things that currently depend on rem
  Instruction* newDep = NonLocal;
  if (depGraphLocal[rem].first != NonLocal) {
    // If we have dep info for rem, use the instruction after rem's own
    // dependency as the new (unconfirmed) starting point
    BasicBlock::iterator RI = depGraphLocal[rem].first;
    RI++;
    newDep = RI;
  } else if (depGraphLocal[rem].first == NonLocal &&
             depGraphLocal[rem].second) {
    // If we have a confirmed non-local flag, use it
    newDep = NonLocal;
  } else {
    // Otherwise, use the immediate successor of rem
    // NOTE: This is because, when getDependency is called, it will first
    // check the immediate predecessor of what is in the cache.
    BasicBlock::iterator RI = rem;
    RI++;
    newDep = RI;
  }

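  // Retarget everything that previously depended on rem to the replacement
  // dependency computed above, then drop rem's entries from the reverse map.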
  std::multimap<Instruction*, Instruction*>::iterator I = reverseDep.find(rem);
  while (I != reverseDep.end() && I->first == rem) {
    // Insert the new dependency
    // Mark it as unconfirmed as long as it is not the non-local flag
    depGraphLocal[I->second] = std::make_pair(newDep, !newDep);
    reverseDep.erase(I);
    I = reverseDep.find(rem);
  }

  getAnalysis<AliasAnalysis>().deleteValue(rem);
}