//===-- InferAddressSpaces.cpp - --------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// CUDA C/C++ includes memory space designation as variable type qualifiers
// (such as __device__ and __shared__). Knowing the space of a memory access
// allows CUDA compilers to emit faster PTX loads and stores. For example, a
// load from shared memory can be translated to `ld.shared`, which is roughly
// 10% faster than a generic `ld` on an NVIDIA Tesla K40c.
//
// Unfortunately, type qualifiers only apply to variable declarations, so CUDA
// compilers must infer the memory space of an address expression from
// type-qualified variables.
//
// LLVM IR uses non-zero (so-called specific) address spaces to represent
// memory spaces (e.g. addrspace(3) means shared memory). The Clang frontend
// places only type-qualified variables in specific address spaces, and then
// conservatively `addrspacecast`s each type-qualified variable to addrspace(0)
// (the so-called generic address space) for other instructions to use.
//
// For example, Clang translates the following CUDA code
//   __shared__ float a[10];
//   float v = a[i];
// to
//   %0 = addrspacecast [10 x float] addrspace(3)* @a to [10 x float]*
//   %1 = gep [10 x float], [10 x float]* %0, i64 0, i64 %i
//   %v = load float, float* %1 ; emits ld.f32
// @a is in addrspace(3) since it's type-qualified, but its use from %1 is
// redirected to %0 (the generic version of @a).
//
// The optimization implemented in this file propagates specific address spaces
// from type-qualified variable declarations to their users. For example, it
// optimizes the above IR to
//   %1 = gep [10 x float] addrspace(3)* @a, i64 0, i64 %i
//   %v = load float addrspace(3)* %1 ; emits ld.shared.f32
// propagating the addrspace(3) from @a to %1. As a result, the NVPTX codegen
// is able to emit ld.shared.f32 for %v.
//
// Address space inference works in two steps. First, it uses a data-flow
// analysis to infer as many generic pointers as possible to point to only one
// specific address space. In the above example, it can prove that %1 only
// points to addrspace(3). This algorithm was published in
//   CUDA: Compiling and optimizing for a GPU platform
//   Chakrabarti, Grover, Aarts, Kong, Kudlur, Lin, Marathe, Murphy, Wang
//   ICCS 2012
//
// Then, address space inference replaces all refinable generic pointers with
// equivalent specific pointers.
//
// The major challenge of implementing this optimization is handling PHINodes,
// which may create loops in the data flow graph. This brings two complications.
//
// First, the data flow analysis in Step 1 needs to be circular. For example,
//     %generic.input = addrspacecast float addrspace(3)* %input to float*
//   loop:
//     %y = phi [ %generic.input, %y2 ]
//     %y2 = getelementptr %y, 1
//     %v = load %y2
//     br ..., label %loop, ...
// proving %y specific requires proving both %generic.input and %y2 specific,
// but proving %y2 specific circles back to %y. To address this complication,
// the data flow analysis operates on a lattice:
//   uninitialized > specific address spaces > generic.
// All address expressions (our implementation only considers phi, bitcast,
// addrspacecast, and getelementptr) start with the uninitialized address space.
// The monotone transfer function moves the address space of a pointer down a
// lattice path from uninitialized to specific and then to generic. A join
// operation of two different specific address spaces pushes the expression down
// to the generic address space. The analysis completes once it reaches a fixed
// point.
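// For illustration, writing AS3 for addrspace(3) and AS1 for addrspace(1),
// the join operation behaves as follows:
//   join(uninitialized, AS3) = AS3
//   join(AS3, AS3)           = AS3
//   join(AS3, AS1)           = generic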
//
// Second, IR rewriting in Step 2 also needs to be circular. For example,
// converting %y to addrspace(3) requires the compiler to know the converted
// %y2, but converting %y2 needs the converted %y. To address this complication,
// we break these cycles using "undef" placeholders. When converting an
// instruction `I` to a new address space, if its operand `Op` is not converted
// yet, we let `I` temporarily use `undef` and fix all the uses of undef later.
// For instance, our algorithm first converts %y to
//   %y' = phi float addrspace(3)* [ %input, undef ]
// Then, it converts %y2 to
//   %y2' = getelementptr %y', 1
// Finally, it fixes the undef in %y' so that
//   %y' = phi float addrspace(3)* [ %input, %y2' ]
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ValueMapper.h"

#define DEBUG_TYPE "infer-address-spaces"

using namespace llvm;

namespace {
static const unsigned UnknownAddressSpace = ~0u;

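// Maps a flat address expression to the address space inferred for it so far.
// An expression starts at UnknownAddressSpace (the lattice top) and moves down
// toward FlatAddrSpace (the bottom) as the analysis proceeds.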
using ValueToAddrSpaceMapTy = DenseMap<const Value *, unsigned>;

/// \brief Infers the address spaces of flat address expressions and rewrites
/// them into specific address spaces where possible.
class InferAddressSpaces : public FunctionPass {
  /// Target-specific flat address space whose uses should be replaced with
  /// specific address spaces if possible.
  unsigned FlatAddrSpace;

public:
  static char ID;

  InferAddressSpaces() : FunctionPass(ID) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<TargetTransformInfoWrapperPass>();
  }

  bool runOnFunction(Function &F) override;

private:
  // Returns the new address space of V if updated; otherwise, returns None.
  Optional<unsigned>
  updateAddressSpace(const Value &V,
                     const ValueToAddrSpaceMapTy &InferredAddrSpace) const;

  // Tries to infer the specific address space of each address expression in
  // Postorder.
  void inferAddressSpaces(const std::vector<Value *> &Postorder,
                          ValueToAddrSpaceMapTy *InferredAddrSpace) const;

  // Changes the flat address expressions in function F to point to specific
  // address spaces if InferredAddrSpace says so. Postorder is the postorder of
  // all flat expressions in the use-def graph of function F.
  bool
  rewriteWithNewAddressSpaces(const std::vector<Value *> &Postorder,
                              const ValueToAddrSpaceMapTy &InferredAddrSpace,
                              Function *F) const;

  void appendsFlatAddressExpressionToPostorderStack(
      Value *V, std::vector<std::pair<Value *, bool>> *PostorderStack,
      DenseSet<Value *> *Visited) const;

  bool rewriteIntrinsicOperands(IntrinsicInst *II,
                                Value *OldV, Value *NewV) const;
  void collectRewritableIntrinsicOperands(
      IntrinsicInst *II,
      std::vector<std::pair<Value *, bool>> *PostorderStack,
      DenseSet<Value *> *Visited) const;

  std::vector<Value *> collectFlatAddressExpressions(Function &F) const;

  Value *cloneValueWithNewAddressSpace(
      Value *V, unsigned NewAddrSpace,
      const ValueToValueMapTy &ValueWithNewAddrSpace,
      SmallVectorImpl<const Use *> *UndefUsesToFix) const;
  unsigned joinAddressSpaces(unsigned AS1, unsigned AS2) const;
};
} // end anonymous namespace

char InferAddressSpaces::ID = 0;

namespace llvm {
void initializeInferAddressSpacesPass(PassRegistry &);
}

INITIALIZE_PASS(InferAddressSpaces, DEBUG_TYPE, "Infer address spaces",
                false, false)

// Returns true if V is an address expression.
// TODO: Currently, we consider only phi, bitcast, addrspacecast, and
// getelementptr operators.
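// For example, in
//   %p = getelementptr float, float* %q, i64 %i
// %p is an address expression derived from %q, whereas the load or store that
// consumes %p is a memory access rather than an address expression.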
static bool isAddressExpression(const Value &V) {
  if (!isa<Operator>(V))
    return false;

  switch (cast<Operator>(V).getOpcode()) {
  case Instruction::PHI:
  case Instruction::BitCast:
  case Instruction::AddrSpaceCast:
  case Instruction::GetElementPtr:
    return true;
  default:
    return false;
  }
}

// Returns the pointer operands of V.
//
// Precondition: V is an address expression.
static SmallVector<Value *, 2> getPointerOperands(const Value &V) {
  assert(isAddressExpression(V));
  const Operator &Op = cast<Operator>(V);
  switch (Op.getOpcode()) {
  case Instruction::PHI: {
    auto IncomingValues = cast<PHINode>(Op).incoming_values();
    return SmallVector<Value *, 2>(IncomingValues.begin(),
                                   IncomingValues.end());
  }
  case Instruction::BitCast:
  case Instruction::AddrSpaceCast:
  case Instruction::GetElementPtr:
    return {Op.getOperand(0)};
  default:
    llvm_unreachable("Unexpected instruction type.");
  }
}

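// Rewrites an intrinsic call that takes a flat pointer operand to take the
// inferred specific pointer instead. Overloaded intrinsics must be re-declared
// via Intrinsic::getDeclaration so that the mangled intrinsic name matches the
// new operand types.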
// TODO: Move logic to TTI?
bool InferAddressSpaces::rewriteIntrinsicOperands(IntrinsicInst *II,
                                                  Value *OldV,
                                                  Value *NewV) const {
  Module *M = II->getModule();

  switch (II->getIntrinsicID()) {
  case Intrinsic::objectsize:
  case Intrinsic::amdgcn_atomic_inc:
  case Intrinsic::amdgcn_atomic_dec: {
    Type *DestTy = II->getType();
    Type *SrcTy = NewV->getType();
    Function *NewDecl =
        Intrinsic::getDeclaration(M, II->getIntrinsicID(), {DestTy, SrcTy});
    II->setArgOperand(0, NewV);
    II->setCalledFunction(NewDecl);
    return true;
  }
  default:
    return false;
  }
}

// TODO: Move logic to TTI?
void InferAddressSpaces::collectRewritableIntrinsicOperands(
    IntrinsicInst *II,
    std::vector<std::pair<Value *, bool>> *PostorderStack,
    DenseSet<Value *> *Visited) const {
  switch (II->getIntrinsicID()) {
  case Intrinsic::objectsize:
  case Intrinsic::amdgcn_atomic_inc:
  case Intrinsic::amdgcn_atomic_dec:
    appendsFlatAddressExpressionToPostorderStack(
        II->getArgOperand(0), PostorderStack, Visited);
    break;
  default:
    break;
  }
}

// If V is an unvisited flat address expression, appends V to PostorderStack
// and marks it as visited.
void InferAddressSpaces::appendsFlatAddressExpressionToPostorderStack(
    Value *V, std::vector<std::pair<Value *, bool>> *PostorderStack,
    DenseSet<Value *> *Visited) const {
  assert(V->getType()->isPointerTy());
  if (isAddressExpression(*V) &&
      V->getType()->getPointerAddressSpace() == FlatAddrSpace) {
    if (Visited->insert(V).second)
      PostorderStack->push_back(std::make_pair(V, false));
  }
}

// Returns all flat address expressions in function F. The elements are ordered
// in postorder.
std::vector<Value *>
InferAddressSpaces::collectFlatAddressExpressions(Function &F) const {
  // This function implements a non-recursive postorder traversal of a partial
  // use-def graph of function F.
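  // The bool in each stack entry indicates whether the value's operands have
  // already been pushed; when true, the value is ready to be appended to the
  // resultant postorder.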
  std::vector<std::pair<Value *, bool>> PostorderStack;
  // The set of visited expressions.
  DenseSet<Value *> Visited;

  auto PushPtrOperand = [&](Value *Ptr) {
    appendsFlatAddressExpressionToPostorderStack(
        Ptr, &PostorderStack, &Visited);
  };

  // We only explore address expressions that are reachable from loads and
  // stores for now because we aim at generating faster loads and stores.
  for (Instruction &I : instructions(F)) {
    if (auto *LI = dyn_cast<LoadInst>(&I))
      PushPtrOperand(LI->getPointerOperand());
    else if (auto *SI = dyn_cast<StoreInst>(&I))
      PushPtrOperand(SI->getPointerOperand());
    else if (auto *RMW = dyn_cast<AtomicRMWInst>(&I))
      PushPtrOperand(RMW->getPointerOperand());
    else if (auto *CmpX = dyn_cast<AtomicCmpXchgInst>(&I))
      PushPtrOperand(CmpX->getPointerOperand());
    else if (auto *MI = dyn_cast<MemIntrinsic>(&I)) {
      // For memset/memcpy/memmove, any pointer operand can be replaced.
      PushPtrOperand(MI->getRawDest());

      // Handle 2nd operand for memcpy/memmove.
      if (auto *MTI = dyn_cast<MemTransferInst>(MI))
        PushPtrOperand(MTI->getRawSource());
    } else if (auto *II = dyn_cast<IntrinsicInst>(&I))
      collectRewritableIntrinsicOperands(II, &PostorderStack, &Visited);
  }

  std::vector<Value *> Postorder; // The resultant postorder.
  while (!PostorderStack.empty()) {
    // If the operands of the expression on top of the stack are already
    // explored, adds that expression to the resultant postorder.
    if (PostorderStack.back().second) {
      Postorder.push_back(PostorderStack.back().first);
      PostorderStack.pop_back();
      continue;
    }
    // Otherwise, adds its operands to the stack and explores them.
    PostorderStack.back().second = true;
    for (Value *PtrOperand : getPointerOperands(*PostorderStack.back().first)) {
      appendsFlatAddressExpressionToPostorderStack(
          PtrOperand, &PostorderStack, &Visited);
    }
  }
  return Postorder;
}

// A helper function for cloneInstructionWithNewAddressSpace. Returns the clone
// of OperandUse.get() in the new address space. If the clone is not ready yet,
// returns an undef in the new address space as a placeholder.
static Value *operandWithNewAddressSpaceOrCreateUndef(
    const Use &OperandUse, unsigned NewAddrSpace,
    const ValueToValueMapTy &ValueWithNewAddrSpace,
    SmallVectorImpl<const Use *> *UndefUsesToFix) {
  Value *Operand = OperandUse.get();
  if (Value *NewOperand = ValueWithNewAddrSpace.lookup(Operand))
    return NewOperand;

  UndefUsesToFix->push_back(&OperandUse);
  return UndefValue::get(
      Operand->getType()->getPointerElementType()->getPointerTo(NewAddrSpace));
}

// Returns a clone of `I` with its operands converted to those specified in
// ValueWithNewAddrSpace. Due to potential cycles in the data flow graph, an
// operand whose address space needs to be modified might not exist in
// ValueWithNewAddrSpace. In that case, uses undef as a placeholder operand and
// adds that operand use to UndefUsesToFix so that the caller can fix them
// later.
//
// Note that we do not necessarily clone `I`, e.g., if it is an addrspacecast
// from a pointer whose type already matches. Therefore, this function returns a
// Value* instead of an Instruction*.
static Value *cloneInstructionWithNewAddressSpace(
    Instruction *I, unsigned NewAddrSpace,
    const ValueToValueMapTy &ValueWithNewAddrSpace,
    SmallVectorImpl<const Use *> *UndefUsesToFix) {
  Type *NewPtrType =
      I->getType()->getPointerElementType()->getPointerTo(NewAddrSpace);

  if (I->getOpcode() == Instruction::AddrSpaceCast) {
    Value *Src = I->getOperand(0);
    // Because `I` is flat, the source address space must be specific.
    // Therefore, the inferred address space must be the source space, according
    // to our algorithm.
    assert(Src->getType()->getPointerAddressSpace() == NewAddrSpace);
    if (Src->getType() != NewPtrType)
      return new BitCastInst(Src, NewPtrType);
    return Src;
  }

  // Computes the converted pointer operands.
  SmallVector<Value *, 4> NewPointerOperands;
  for (const Use &OperandUse : I->operands()) {
    if (!OperandUse.get()->getType()->isPointerTy())
      NewPointerOperands.push_back(nullptr);
    else
      NewPointerOperands.push_back(operandWithNewAddressSpaceOrCreateUndef(
          OperandUse, NewAddrSpace, ValueWithNewAddrSpace, UndefUsesToFix));
  }

  switch (I->getOpcode()) {
  case Instruction::BitCast:
    return new BitCastInst(NewPointerOperands[0], NewPtrType);
  case Instruction::PHI: {
    assert(I->getType()->isPointerTy());
    PHINode *PHI = cast<PHINode>(I);
    PHINode *NewPHI = PHINode::Create(NewPtrType, PHI->getNumIncomingValues());
    for (unsigned Index = 0; Index < PHI->getNumIncomingValues(); ++Index) {
      unsigned OperandNo = PHINode::getOperandNumForIncomingValue(Index);
      NewPHI->addIncoming(NewPointerOperands[OperandNo],
                          PHI->getIncomingBlock(Index));
    }
    return NewPHI;
  }
  case Instruction::GetElementPtr: {
    GetElementPtrInst *GEP = cast<GetElementPtrInst>(I);
    GetElementPtrInst *NewGEP = GetElementPtrInst::Create(
        GEP->getSourceElementType(), NewPointerOperands[0],
        SmallVector<Value *, 4>(GEP->idx_begin(), GEP->idx_end()));
    NewGEP->setIsInBounds(GEP->isInBounds());
    return NewGEP;
  }
  default:
    llvm_unreachable("Unexpected opcode");
  }
}

// Similar to cloneInstructionWithNewAddressSpace, returns a clone of the
// constant expression `CE` with its operands replaced as specified in
// ValueWithNewAddrSpace.
static Value *cloneConstantExprWithNewAddressSpace(
    ConstantExpr *CE, unsigned NewAddrSpace,
    const ValueToValueMapTy &ValueWithNewAddrSpace) {
  Type *TargetType =
      CE->getType()->getPointerElementType()->getPointerTo(NewAddrSpace);

  if (CE->getOpcode() == Instruction::AddrSpaceCast) {
    // Because CE is flat, the source address space must be specific.
    // Therefore, the inferred address space must be the source space according
    // to our algorithm.
    assert(CE->getOperand(0)->getType()->getPointerAddressSpace() ==
           NewAddrSpace);
    return ConstantExpr::getBitCast(CE->getOperand(0), TargetType);
  }

  // Computes the operands of the new constant expression.
  SmallVector<Constant *, 4> NewOperands;
  for (unsigned Index = 0; Index < CE->getNumOperands(); ++Index) {
    Constant *Operand = CE->getOperand(Index);
    // If the address space of `Operand` needs to be modified, the new operand
    // with the new address space should already be in ValueWithNewAddrSpace
    // because (1) the constant expressions we consider (i.e. addrspacecast,
    // bitcast, and getelementptr) do not incur cycles in the data flow graph
    // and (2) this function is called on constant expressions in postorder.
    if (Value *NewOperand = ValueWithNewAddrSpace.lookup(Operand)) {
      NewOperands.push_back(cast<Constant>(NewOperand));
    } else {
      // Otherwise, reuses the old operand.
      NewOperands.push_back(Operand);
    }
  }

  if (CE->getOpcode() == Instruction::GetElementPtr) {
    // Needs to specify the source type while constructing a getelementptr
    // constant expression.
    return CE->getWithOperands(
        NewOperands, TargetType, /*OnlyIfReduced=*/false,
        NewOperands[0]->getType()->getPointerElementType());
  }

  return CE->getWithOperands(NewOperands, TargetType);
}

// Returns a clone of the value `V`, with its operands replaced as specified in
// ValueWithNewAddrSpace. This function is called on every flat address
// expression whose address space needs to be modified, in postorder.
//
// See cloneInstructionWithNewAddressSpace for the meaning of UndefUsesToFix.
Value *InferAddressSpaces::cloneValueWithNewAddressSpace(
    Value *V, unsigned NewAddrSpace,
    const ValueToValueMapTy &ValueWithNewAddrSpace,
    SmallVectorImpl<const Use *> *UndefUsesToFix) const {
  // All values in Postorder are flat address expressions.
  assert(isAddressExpression(*V) &&
         V->getType()->getPointerAddressSpace() == FlatAddrSpace);

  if (Instruction *I = dyn_cast<Instruction>(V)) {
    Value *NewV = cloneInstructionWithNewAddressSpace(
        I, NewAddrSpace, ValueWithNewAddrSpace, UndefUsesToFix);
    if (Instruction *NewI = dyn_cast<Instruction>(NewV)) {
      if (NewI->getParent() == nullptr) {
        NewI->insertBefore(I);
        NewI->takeName(I);
      }
    }
    return NewV;
  }

  return cloneConstantExprWithNewAddressSpace(
      cast<ConstantExpr>(V), NewAddrSpace, ValueWithNewAddrSpace);
}

// Defines the join operation on the address space lattice (see the file header
// comments).
unsigned InferAddressSpaces::joinAddressSpaces(unsigned AS1,
                                               unsigned AS2) const {
  if (AS1 == FlatAddrSpace || AS2 == FlatAddrSpace)
    return FlatAddrSpace;

  if (AS1 == UnknownAddressSpace)
    return AS2;
  if (AS2 == UnknownAddressSpace)
    return AS1;

  // The join of two different specific address spaces is flat.
  return (AS1 == AS2) ? AS1 : FlatAddrSpace;
}

bool InferAddressSpaces::runOnFunction(Function &F) {
  if (skipFunction(F))
    return false;

  const TargetTransformInfo &TTI =
      getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
  FlatAddrSpace = TTI.getFlatAddressSpace();
  if (FlatAddrSpace == UnknownAddressSpace)
    return false;

  // Collects all flat address expressions in postorder.
  std::vector<Value *> Postorder = collectFlatAddressExpressions(F);

  // Runs a data-flow analysis to refine the address spaces of every expression
  // in Postorder.
  ValueToAddrSpaceMapTy InferredAddrSpace;
  inferAddressSpaces(Postorder, &InferredAddrSpace);

  // Changes the address spaces of the flat address expressions that are
  // inferred to point to a specific address space.
  return rewriteWithNewAddressSpaces(Postorder, InferredAddrSpace, &F);
}

void InferAddressSpaces::inferAddressSpaces(
    const std::vector<Value *> &Postorder,
    ValueToAddrSpaceMapTy *InferredAddrSpace) const {
  SetVector<Value *> Worklist(Postorder.begin(), Postorder.end());
  // Initially, all expressions are in the uninitialized address space.
  for (Value *V : Postorder)
    (*InferredAddrSpace)[V] = UnknownAddressSpace;

  while (!Worklist.empty()) {
    Value *V = Worklist.pop_back_val();

    // Tries to update the address space of V according to the address spaces
    // of its operands.
    DEBUG(dbgs() << "Updating the address space of\n " << *V << '\n');
    Optional<unsigned> NewAS = updateAddressSpace(*V, *InferredAddrSpace);
    if (!NewAS.hasValue())
      continue;
    // If any updates are made, adds the users of V to the worklist because
    // their address spaces can possibly be updated as well.
    DEBUG(dbgs() << " to " << NewAS.getValue() << '\n');
    (*InferredAddrSpace)[V] = NewAS.getValue();

    for (Value *User : V->users()) {
      // Skip if User is already in the worklist.
      if (Worklist.count(User))
        continue;

      auto Pos = InferredAddrSpace->find(User);
      // Our algorithm only updates the address spaces of flat address
      // expressions, which are those in InferredAddrSpace.
      if (Pos == InferredAddrSpace->end())
        continue;

      // Function updateAddressSpace moves the address space down a lattice
      // path. Therefore, nothing to do if User is already inferred as flat (the
      // bottom element in the lattice).
      if (Pos->second == FlatAddrSpace)
        continue;

      Worklist.insert(User);
    }
  }
}

Optional<unsigned> InferAddressSpaces::updateAddressSpace(
    const Value &V, const ValueToAddrSpaceMapTy &InferredAddrSpace) const {
  assert(InferredAddrSpace.count(&V));

  // The new inferred address space equals the join of the address spaces
  // of all its pointer operands.
  unsigned NewAS = UnknownAddressSpace;
  for (Value *PtrOperand : getPointerOperands(V)) {
    unsigned OperandAS;
    if (InferredAddrSpace.count(PtrOperand))
      OperandAS = InferredAddrSpace.lookup(PtrOperand);
    else
      OperandAS = PtrOperand->getType()->getPointerAddressSpace();
    NewAS = joinAddressSpaces(NewAS, OperandAS);

    // join(flat, *) = flat. So we can break if NewAS is already flat.
    if (NewAS == FlatAddrSpace)
      break;
  }

  unsigned OldAS = InferredAddrSpace.lookup(&V);
  assert(OldAS != FlatAddrSpace);
  if (OldAS == NewAS)
    return None;
  return NewAS;
}

/// Returns true if \p U is the pointer operand of a memory instruction with
/// a single pointer operand that can have its address space changed by simply
/// mutating the use to a new value.
static bool isSimplePointerUseValidToReplace(Use &U) {
  User *Inst = U.getUser();
  unsigned OpNo = U.getOperandNo();

  if (auto *LI = dyn_cast<LoadInst>(Inst))
    return OpNo == LoadInst::getPointerOperandIndex() && !LI->isVolatile();

  if (auto *SI = dyn_cast<StoreInst>(Inst))
    return OpNo == StoreInst::getPointerOperandIndex() && !SI->isVolatile();

  if (auto *RMW = dyn_cast<AtomicRMWInst>(Inst))
    return OpNo == AtomicRMWInst::getPointerOperandIndex() && !RMW->isVolatile();

  if (auto *CmpX = dyn_cast<AtomicCmpXchgInst>(Inst)) {
    return OpNo == AtomicCmpXchgInst::getPointerOperandIndex() &&
           !CmpX->isVolatile();
  }

  return false;
}

/// Update memory intrinsic uses that require more complex processing than
/// simple memory instructions. These require re-mangling and may have multiple
/// pointer operands.
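/// For illustration, a memcpy whose destination was inferred to be
/// addrspace(3) is recreated so the intrinsic is re-mangled on the new pointer
/// types, e.g. something like
///   call void @llvm.memcpy.p3i8.p0i8.i64(i8 addrspace(3)* %dst, i8* %src, ...)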
static bool handleMemIntrinsicPtrUse(MemIntrinsic *MI,
                                     Value *OldV, Value *NewV) {
  IRBuilder<> B(MI);
  MDNode *TBAA = MI->getMetadata(LLVMContext::MD_tbaa);
  MDNode *ScopeMD = MI->getMetadata(LLVMContext::MD_alias_scope);
  MDNode *NoAliasMD = MI->getMetadata(LLVMContext::MD_noalias);

  if (auto *MSI = dyn_cast<MemSetInst>(MI)) {
    B.CreateMemSet(NewV, MSI->getValue(),
                   MSI->getLength(), MSI->getAlignment(),
                   false, // isVolatile
                   TBAA, ScopeMD, NoAliasMD);
  } else if (auto *MTI = dyn_cast<MemTransferInst>(MI)) {
    Value *Src = MTI->getRawSource();
    Value *Dest = MTI->getRawDest();

    // Be careful in case this is a self-to-self copy.
    if (Src == OldV)
      Src = NewV;

    if (Dest == OldV)
      Dest = NewV;

    if (isa<MemCpyInst>(MTI)) {
      MDNode *TBAAStruct = MTI->getMetadata(LLVMContext::MD_tbaa_struct);
      B.CreateMemCpy(Dest, Src, MTI->getLength(),
                     MTI->getAlignment(),
                     false, // isVolatile
                     TBAA, TBAAStruct, ScopeMD, NoAliasMD);
    } else {
      assert(isa<MemMoveInst>(MTI));
      B.CreateMemMove(Dest, Src, MTI->getLength(),
                      MTI->getAlignment(),
                      false, // isVolatile
                      TBAA, ScopeMD, NoAliasMD);
    }
  } else
    llvm_unreachable("unhandled MemIntrinsic");

  MI->eraseFromParent();
  return true;
}

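// Advances the use iterator past any remaining uses belonging to the same
// user, so that each user is visited once even if it references the value in
// multiple operands.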
static Value::use_iterator skipToNextUser(Value::use_iterator I,
                                          Value::use_iterator End) {
  User *CurUser = I->getUser();
  ++I;

  while (I != End && I->getUser() == CurUser)
    ++I;

  return I;
}

bool InferAddressSpaces::rewriteWithNewAddressSpaces(
    const std::vector<Value *> &Postorder,
    const ValueToAddrSpaceMapTy &InferredAddrSpace, Function *F) const {
  // For each address expression to be modified, creates a clone of it with its
  // pointer operands converted to the new address space. Since the pointer
  // operands are converted, the clone is naturally in the new address space by
  // construction.
  ValueToValueMapTy ValueWithNewAddrSpace;
  SmallVector<const Use *, 32> UndefUsesToFix;
  for (Value *V : Postorder) {
    unsigned NewAddrSpace = InferredAddrSpace.lookup(V);
    if (V->getType()->getPointerAddressSpace() != NewAddrSpace) {
      ValueWithNewAddrSpace[V] = cloneValueWithNewAddressSpace(
          V, NewAddrSpace, ValueWithNewAddrSpace, &UndefUsesToFix);
    }
  }

  if (ValueWithNewAddrSpace.empty())
    return false;

  // Fixes all the undef uses generated by cloneInstructionWithNewAddressSpace.
  for (const Use *UndefUse : UndefUsesToFix) {
    User *V = UndefUse->getUser();
    User *NewV = cast<User>(ValueWithNewAddrSpace.lookup(V));
    unsigned OperandNo = UndefUse->getOperandNo();
    assert(isa<UndefValue>(NewV->getOperand(OperandNo)));
    NewV->setOperand(OperandNo, ValueWithNewAddrSpace.lookup(UndefUse->get()));
  }

  // Replaces the uses of the old address expressions with the new ones.
  for (Value *V : Postorder) {
    Value *NewV = ValueWithNewAddrSpace.lookup(V);
    if (NewV == nullptr)
      continue;

    DEBUG(dbgs() << "Replacing the uses of " << *V
                 << "\n with\n " << *NewV << '\n');

    Value::use_iterator I, E;
    for (I = V->use_begin(), E = V->use_end(); I != E;) {
      Use &U = *I;

      // Some users may see the same pointer operand in multiple operands. Skip
      // to the next instruction.
      I = skipToNextUser(I, E);

      if (isSimplePointerUseValidToReplace(U)) {
        // If V is used as the pointer operand of a compatible memory operation,
        // sets the pointer operand to NewV. This replacement does not change
        // the element type, so the resultant load/store is still valid.
        U.set(NewV);
        continue;
      }

      User *CurUser = U.getUser();
      // Handle more complex cases like intrinsics that need to be remangled.
      if (auto *MI = dyn_cast<MemIntrinsic>(CurUser)) {
        if (!MI->isVolatile() && handleMemIntrinsicPtrUse(MI, V, NewV))
          continue;
      }

      if (auto *II = dyn_cast<IntrinsicInst>(CurUser)) {
        if (rewriteIntrinsicOperands(II, V, NewV))
          continue;
      }

      if (isa<Instruction>(CurUser)) {
        // Otherwise, replaces the use with flat(NewV).
        // TODO: Some optimization opportunities are missed. For example, in
        //   %0 = icmp eq float* %p, %q
        // if both p and q are inferred to be shared, we can rewrite %0 as
        //   %0 = icmp eq float addrspace(3)* %new_p, %new_q
        // instead of currently
        //   %flat_p = addrspacecast float addrspace(3)* %new_p to float*
        //   %flat_q = addrspacecast float addrspace(3)* %new_q to float*
        //   %0 = icmp eq float* %flat_p, %flat_q
        if (Instruction *Inst = dyn_cast<Instruction>(V)) {
          BasicBlock::iterator InsertPos = std::next(Inst->getIterator());
          while (isa<PHINode>(InsertPos))
            ++InsertPos;
          U.set(new AddrSpaceCastInst(NewV, V->getType(), "", &*InsertPos));
        } else {
          U.set(ConstantExpr::getAddrSpaceCast(cast<Constant>(NewV),
                                               V->getType()));
        }
      }
    }

    if (V->use_empty())
      RecursivelyDeleteTriviallyDeadInstructions(V);
  }

  return true;
}

FunctionPass *llvm::createInferAddressSpacesPass() {
  return new InferAddressSpaces();
}