Chris Lattner | ed7b41e | 2003-05-27 15:45:27 +0000 | [diff] [blame] | 1 | //===- ScalarReplAggregates.cpp - Scalar Replacement of Aggregates --------===// |
Misha Brukman | fd93908 | 2005-04-21 23:48:37 +0000 | [diff] [blame] | 2 | // |
John Criswell | b576c94 | 2003-10-20 19:43:21 +0000 | [diff] [blame] | 3 | // The LLVM Compiler Infrastructure |
| 4 | // |
| 5 | // This file was developed by the LLVM research group and is distributed under |
| 6 | // the University of Illinois Open Source License. See LICENSE.TXT for details. |
Misha Brukman | fd93908 | 2005-04-21 23:48:37 +0000 | [diff] [blame] | 7 | // |
John Criswell | b576c94 | 2003-10-20 19:43:21 +0000 | [diff] [blame] | 8 | //===----------------------------------------------------------------------===// |
Chris Lattner | ed7b41e | 2003-05-27 15:45:27 +0000 | [diff] [blame] | 9 | // |
| 10 | // This transformation implements the well known scalar replacement of |
| 11 | // aggregates transformation. This xform breaks up alloca instructions of |
| 12 | // aggregate type (structure or array) into individual alloca instructions for |
Chris Lattner | 38aec32 | 2003-09-11 16:45:55 +0000 | [diff] [blame] | 13 | // each member (if possible). Then, if possible, it transforms the individual |
| 14 | // alloca instructions into nice clean scalar SSA form. |
| 15 | // |
// This combines a simple SRoA algorithm with the Mem2Reg algorithm because
// they often interact, especially for C++ programs.  As such, iterating
// between SRoA, then Mem2Reg until we run out of things to promote works well.
Chris Lattner | ed7b41e | 2003-05-27 15:45:27 +0000 | [diff] [blame] | 19 | // |
| 20 | //===----------------------------------------------------------------------===// |
| 21 | |
| 22 | #include "llvm/Transforms/Scalar.h" |
Chris Lattner | 38aec32 | 2003-09-11 16:45:55 +0000 | [diff] [blame] | 23 | #include "llvm/Constants.h" |
| 24 | #include "llvm/DerivedTypes.h" |
Chris Lattner | ed7b41e | 2003-05-27 15:45:27 +0000 | [diff] [blame] | 25 | #include "llvm/Function.h" |
| 26 | #include "llvm/Pass.h" |
Misha Brukman | d8e1eea | 2004-07-29 17:05:13 +0000 | [diff] [blame] | 27 | #include "llvm/Instructions.h" |
Chris Lattner | 38aec32 | 2003-09-11 16:45:55 +0000 | [diff] [blame] | 28 | #include "llvm/Analysis/Dominators.h" |
Chris Lattner | be883a2 | 2003-11-25 21:09:18 +0000 | [diff] [blame] | 29 | #include "llvm/Support/GetElementPtrTypeIterator.h" |
Chris Lattner | 38aec32 | 2003-09-11 16:45:55 +0000 | [diff] [blame] | 30 | #include "llvm/Target/TargetData.h" |
| 31 | #include "llvm/Transforms/Utils/PromoteMemToReg.h" |
Reid Spencer | 551ccae | 2004-09-01 22:55:40 +0000 | [diff] [blame] | 32 | #include "llvm/Support/Debug.h" |
| 33 | #include "llvm/ADT/Statistic.h" |
| 34 | #include "llvm/ADT/StringExtras.h" |
Chris Lattner | d866473 | 2003-12-02 17:43:55 +0000 | [diff] [blame] | 35 | using namespace llvm; |
Brian Gaeke | d0fde30 | 2003-11-11 22:41:34 +0000 | [diff] [blame] | 36 | |
namespace {
  // Statistics reported under -stats for this pass.
  Statistic<> NumReplaced("scalarrepl", "Number of allocas broken up");
  Statistic<> NumPromoted("scalarrepl", "Number of allocas promoted");

  /// SROA - Scalar Replacement of Aggregates.  Breaks up allocas of struct or
  /// array type into one alloca per member, then promotes the resulting
  /// scalar allocas into SSA registers, iterating the two phases until no
  /// further progress can be made.
  struct SROA : public FunctionPass {
    bool runOnFunction(Function &F);

    // The two phases of the pass; each returns true if it changed the IR.
    bool performScalarRepl(Function &F);
    bool performPromotion(Function &F);

    // getAnalysisUsage - We need dominance information for mem2reg-style
    // promotion and TargetData for promotability checks, but we never alter
    // the CFG, so say so.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.addRequired<DominatorTree>();
      AU.addRequired<DominanceFrontier>();
      AU.addRequired<TargetData>();
      AU.setPreservesCFG();
    }

  private:
    // Safety predicates return 0 (unsafe), 1 (safe only after
    // CanonicalizeAllocaUsers runs), or 3 (safe as-is); the codes are chosen
    // so results can be merged with bitwise AND (see
    // isSafeAllocaToScalarRepl).
    int isSafeElementUse(Value *Ptr);
    int isSafeUseOfAllocation(Instruction *User);
    int isSafeAllocaToScalarRepl(AllocationInst *AI);
    void CanonicalizeAllocaUsers(AllocationInst *AI);
    // NOTE(review): no definition of AddNewAlloca appears in this translation
    // unit — confirm it is actually referenced before keeping the declaration.
    AllocaInst *AddNewAlloca(Function &F, const Type *Ty, AllocationInst *Base);
  };

  RegisterOpt<SROA> X("scalarrepl", "Scalar Replacement of Aggregates");
}
| 66 | |
Brian Gaeke | d0fde30 | 2003-11-11 22:41:34 +0000 | [diff] [blame] | 67 | // Public interface to the ScalarReplAggregates pass |
Chris Lattner | 4b50156 | 2004-09-20 04:43:15 +0000 | [diff] [blame] | 68 | FunctionPass *llvm::createScalarReplAggregatesPass() { return new SROA(); } |
Chris Lattner | ed7b41e | 2003-05-27 15:45:27 +0000 | [diff] [blame] | 69 | |
| 70 | |
Chris Lattner | ed7b41e | 2003-05-27 15:45:27 +0000 | [diff] [blame] | 71 | bool SROA::runOnFunction(Function &F) { |
Chris Lattner | fe7ea0d | 2003-09-12 15:36:03 +0000 | [diff] [blame] | 72 | bool Changed = performPromotion(F); |
| 73 | while (1) { |
| 74 | bool LocalChange = performScalarRepl(F); |
| 75 | if (!LocalChange) break; // No need to repromote if no scalarrepl |
| 76 | Changed = true; |
| 77 | LocalChange = performPromotion(F); |
| 78 | if (!LocalChange) break; // No need to re-scalarrepl if no promotion |
| 79 | } |
Chris Lattner | 38aec32 | 2003-09-11 16:45:55 +0000 | [diff] [blame] | 80 | |
| 81 | return Changed; |
| 82 | } |
| 83 | |
| 84 | |
| 85 | bool SROA::performPromotion(Function &F) { |
| 86 | std::vector<AllocaInst*> Allocas; |
| 87 | const TargetData &TD = getAnalysis<TargetData>(); |
Chris Lattner | 43f820d | 2003-10-05 21:20:13 +0000 | [diff] [blame] | 88 | DominatorTree &DT = getAnalysis<DominatorTree>(); |
| 89 | DominanceFrontier &DF = getAnalysis<DominanceFrontier>(); |
Chris Lattner | 38aec32 | 2003-09-11 16:45:55 +0000 | [diff] [blame] | 90 | |
Chris Lattner | 02a3be0 | 2003-09-20 14:39:18 +0000 | [diff] [blame] | 91 | BasicBlock &BB = F.getEntryBlock(); // Get the entry node for the function |
Chris Lattner | 38aec32 | 2003-09-11 16:45:55 +0000 | [diff] [blame] | 92 | |
Chris Lattner | fe7ea0d | 2003-09-12 15:36:03 +0000 | [diff] [blame] | 93 | bool Changed = false; |
Misha Brukman | fd93908 | 2005-04-21 23:48:37 +0000 | [diff] [blame] | 94 | |
Chris Lattner | 38aec32 | 2003-09-11 16:45:55 +0000 | [diff] [blame] | 95 | while (1) { |
| 96 | Allocas.clear(); |
| 97 | |
| 98 | // Find allocas that are safe to promote, by looking at all instructions in |
| 99 | // the entry node |
| 100 | for (BasicBlock::iterator I = BB.begin(), E = --BB.end(); I != E; ++I) |
| 101 | if (AllocaInst *AI = dyn_cast<AllocaInst>(I)) // Is it an alloca? |
| 102 | if (isAllocaPromotable(AI, TD)) |
| 103 | Allocas.push_back(AI); |
| 104 | |
| 105 | if (Allocas.empty()) break; |
| 106 | |
Chris Lattner | 43f820d | 2003-10-05 21:20:13 +0000 | [diff] [blame] | 107 | PromoteMemToReg(Allocas, DT, DF, TD); |
Chris Lattner | 38aec32 | 2003-09-11 16:45:55 +0000 | [diff] [blame] | 108 | NumPromoted += Allocas.size(); |
| 109 | Changed = true; |
| 110 | } |
| 111 | |
| 112 | return Changed; |
| 113 | } |
| 114 | |
| 115 | |
// performScalarRepl - This algorithm is a simple worklist driven algorithm,
// which runs on all of the malloc/alloca instructions in the function, removing
// them if they are only used by getelementptr instructions.
//
bool SROA::performScalarRepl(Function &F) {
  std::vector<AllocationInst*> WorkList;

  // Scan the entry basic block, adding any alloca's and mallocs to the worklist
  BasicBlock &BB = F.getEntryBlock();
  for (BasicBlock::iterator I = BB.begin(), E = BB.end(); I != E; ++I)
    if (AllocationInst *A = dyn_cast<AllocationInst>(I))
      WorkList.push_back(A);

  // Process the worklist
  bool Changed = false;
  while (!WorkList.empty()) {
    AllocationInst *AI = WorkList.back();
    WorkList.pop_back();

    // We cannot transform the allocation instruction if it is an array
    // allocation (allocations OF arrays are ok though), and an allocation of a
    // scalar value cannot be decomposed at all.
    //
    if (AI->isArrayAllocation() ||
        (!isa<StructType>(AI->getAllocatedType()) &&
         !isa<ArrayType>(AI->getAllocatedType()))) continue;

    // Check that all of the users of the allocation are capable of being
    // transformed.
    switch (isSafeAllocaToScalarRepl(AI)) {
    default: assert(0 && "Unexpected value!");
    case 0:  // Not safe to scalar replace.
      continue;
    case 1:  // Safe, but requires cleanup/canonicalizations first
      CanonicalizeAllocaUsers(AI);
      // FALL THROUGH: after canonicalization the alloca is safe to replace.
    case 3:  // Safe to scalar replace.
      break;
    }

    DEBUG(std::cerr << "Found inst to xform: " << *AI);
    Changed = true;

    // Create one new alloca for each member of the aggregate.  Each new
    // alloca is also pushed on the worklist so it can itself be decomposed
    // recursively if it is again of aggregate type.
    std::vector<AllocaInst*> ElementAllocas;
    if (const StructType *ST = dyn_cast<StructType>(AI->getAllocatedType())) {
      ElementAllocas.reserve(ST->getNumContainedTypes());
      for (unsigned i = 0, e = ST->getNumContainedTypes(); i != e; ++i) {
        AllocaInst *NA = new AllocaInst(ST->getContainedType(i), 0,
                                        AI->getName() + "." + utostr(i), AI);
        ElementAllocas.push_back(NA);
        WorkList.push_back(NA);  // Add to worklist for recursive processing
      }
    } else {
      // Not a struct, so (per the filter above) it must be an array.
      const ArrayType *AT = cast<ArrayType>(AI->getAllocatedType());
      ElementAllocas.reserve(AT->getNumElements());
      const Type *ElTy = AT->getElementType();
      for (unsigned i = 0, e = AT->getNumElements(); i != e; ++i) {
        AllocaInst *NA = new AllocaInst(ElTy, 0,
                                        AI->getName() + "." + utostr(i), AI);
        ElementAllocas.push_back(NA);
        WorkList.push_back(NA);  // Add to worklist for recursive processing
      }
    }

    // Now that we have created the alloca instructions that we want to use,
    // expand the getelementptr instructions to use them.
    //
    while (!AI->use_empty()) {
      Instruction *User = cast<Instruction>(AI->use_back());
      // The safety check guaranteed every remaining user is a GEP, so this
      // cast cannot fail.
      GetElementPtrInst *GEPI = cast<GetElementPtrInst>(User);
      // We now know that the GEP is of the form: GEP <ptr>, 0, <cst>
      unsigned Idx =
        (unsigned)cast<ConstantInt>(GEPI->getOperand(2))->getRawValue();

      assert(Idx < ElementAllocas.size() && "Index out of range?");
      AllocaInst *AllocaToUse = ElementAllocas[Idx];

      Value *RepValue;
      if (GEPI->getNumOperands() == 3) {
        // Do not insert a new getelementptr instruction with zero indices, only
        // to have it optimized out later.
        RepValue = AllocaToUse;
      } else {
        // We are indexing deeply into the structure, so we still need a
        // getelement ptr instruction to finish the indexing.  This may be
        // expanded itself once the worklist is rerun.
        //
        std::string OldName = GEPI->getName();  // Steal the old name.
        std::vector<Value*> NewArgs;
        // Rebuild the index list: a fresh leading zero, then the old GEP's
        // indices past the element selector (operands 3..end).
        NewArgs.push_back(Constant::getNullValue(Type::IntTy));
        NewArgs.insert(NewArgs.end(), GEPI->op_begin()+3, GEPI->op_end());
        GEPI->setName("");
        RepValue = new GetElementPtrInst(AllocaToUse, NewArgs, OldName, GEPI);
      }

      // Move all of the users over to the new GEP.
      GEPI->replaceAllUsesWith(RepValue);
      // Delete the old GEP
      GEPI->eraseFromParent();
    }

    // Finally, delete the Alloca instruction
    AI->getParent()->getInstList().erase(AI);
    NumReplaced++;
  }

  return Changed;
}
Chris Lattner | 5e062a1 | 2003-05-30 04:15:41 +0000 | [diff] [blame] | 223 | |
| 224 | |
Chris Lattner | f5990ed | 2004-11-14 04:24:28 +0000 | [diff] [blame] | 225 | /// isSafeElementUse - Check to see if this use is an allowed use for a |
| 226 | /// getelementptr instruction of an array aggregate allocation. |
| 227 | /// |
| 228 | int SROA::isSafeElementUse(Value *Ptr) { |
| 229 | for (Value::use_iterator I = Ptr->use_begin(), E = Ptr->use_end(); |
| 230 | I != E; ++I) { |
| 231 | Instruction *User = cast<Instruction>(*I); |
| 232 | switch (User->getOpcode()) { |
| 233 | case Instruction::Load: break; |
| 234 | case Instruction::Store: |
| 235 | // Store is ok if storing INTO the pointer, not storing the pointer |
| 236 | if (User->getOperand(0) == Ptr) return 0; |
| 237 | break; |
| 238 | case Instruction::GetElementPtr: { |
| 239 | GetElementPtrInst *GEP = cast<GetElementPtrInst>(User); |
| 240 | if (GEP->getNumOperands() > 1) { |
| 241 | if (!isa<Constant>(GEP->getOperand(1)) || |
| 242 | !cast<Constant>(GEP->getOperand(1))->isNullValue()) |
| 243 | return 0; // Using pointer arithmetic to navigate the array... |
| 244 | } |
| 245 | if (!isSafeElementUse(GEP)) return 0; |
| 246 | break; |
| 247 | } |
| 248 | default: |
| 249 | DEBUG(std::cerr << " Transformation preventing inst: " << *User); |
| 250 | return 0; |
| 251 | } |
| 252 | } |
| 253 | return 3; // All users look ok :) |
| 254 | } |
| 255 | |
Chris Lattner | d878ecd | 2004-11-14 05:00:19 +0000 | [diff] [blame] | 256 | /// AllUsersAreLoads - Return true if all users of this value are loads. |
| 257 | static bool AllUsersAreLoads(Value *Ptr) { |
| 258 | for (Value::use_iterator I = Ptr->use_begin(), E = Ptr->use_end(); |
| 259 | I != E; ++I) |
| 260 | if (cast<Instruction>(*I)->getOpcode() != Instruction::Load) |
| 261 | return false; |
Misha Brukman | fd93908 | 2005-04-21 23:48:37 +0000 | [diff] [blame] | 262 | return true; |
Chris Lattner | d878ecd | 2004-11-14 05:00:19 +0000 | [diff] [blame] | 263 | } |
| 264 | |
/// isSafeUseOfAllocation - Check to see if this user is an allowed use for an
/// aggregate allocation.
///
/// Returns 0 if the use is unsafe, 3 if it is safe as-is, or 1 if it is safe
/// only after CanonicalizeAllocaUsers rewrites it (variable index into a
/// 1- or 2-element array whose users are all loads).
int SROA::isSafeUseOfAllocation(Instruction *User) {
  // Only getelementptr users of the aggregate can be decomposed.
  if (!isa<GetElementPtrInst>(User)) return 0;

  GetElementPtrInst *GEPI = cast<GetElementPtrInst>(User);
  gep_type_iterator I = gep_type_begin(GEPI), E = gep_type_end(GEPI);

  // The GEP is safe to transform if it is of the form GEP <ptr>, 0, <cst>
  if (I == E ||
      I.getOperand() != Constant::getNullValue(I.getOperand()->getType()))
    return 0;

  // Step past the leading zero to the index that selects the element.
  ++I;
  if (I == E) return 0;  // ran out of GEP indices??

  // If this is a use of an array allocation, do a bit more checking for sanity.
  if (const ArrayType *AT = dyn_cast<ArrayType>(*I)) {
    uint64_t NumElements = AT->getNumElements();

    if (ConstantInt *CI = dyn_cast<ConstantInt>(I.getOperand())) {
      // Check to make sure that index falls within the array.  If not,
      // something funny is going on, so we won't do the optimization.
      //
      if (cast<ConstantInt>(GEPI->getOperand(2))->getRawValue() >= NumElements)
        return 0;

    } else {
      // If this is an array index and the index is not constant, we cannot
      // promote... that is unless the array has exactly one or two elements in
      // it, in which case we CAN promote it, but we have to canonicalize this
      // out if this is the only problem.
      if (NumElements == 1 || NumElements == 2)
        return AllUsersAreLoads(GEPI) ? 1 : 0;  // Canonicalization required!
      return 0;
    }
  }

  // If there are any non-simple uses of this getelementptr, make sure to reject
  // them.
  return isSafeElementUse(GEPI);
}
| 308 | |
/// isSafeStructAllocaToScalarRepl - Check to see if the specified allocation of
/// an aggregate can be broken down into elements.  Return 0 if not, 3 if safe,
/// or 1 if safe after canonicalization has been performed.
///
int SROA::isSafeAllocaToScalarRepl(AllocationInst *AI) {
  // Loop over the use list of the alloca.  We can only transform it if all of
  // the users are safe to transform.
  //
  // The 0/1/3 codes are chosen so that combining per-use verdicts with
  // bitwise AND yields the weakest one: 3&3==3 (safe), 3&1==1 (needs
  // canonicalization), and anything AND 0 is 0 (unsafe).
  int isSafe = 3;
  for (Value::use_iterator I = AI->use_begin(), E = AI->use_end();
       I != E; ++I) {
    isSafe &= isSafeUseOfAllocation(cast<Instruction>(*I));
    if (isSafe == 0) {
      // Bail out on the first unsafe user; no later use can raise the verdict.
      DEBUG(std::cerr << "Cannot transform: " << *AI << " due to user: "
            << **I);
      return 0;
    }
  }
  // If we require cleanup, isSafe is now 1, otherwise it is 3.
  return isSafe;
}
| 330 | |
/// CanonicalizeAllocaUsers - If SROA reported that it can promote the specified
/// allocation, but only if cleaned up, perform the cleanups required.
///
/// The only canonicalization needed (per isSafeUseOfAllocation) is rewriting
/// GEPs with a VARIABLE index into a 1- or 2-element array so that all indices
/// become constant.
void SROA::CanonicalizeAllocaUsers(AllocationInst *AI) {
  // At this point, we know that the end result will be SROA'd and promoted, so
  // we can insert ugly code if required so long as sroa+mem2reg will clean it
  // up.
  for (Value::use_iterator UI = AI->use_begin(), E = AI->use_end();
       UI != E; ) {
    // isSafeAllocaToScalarRepl only returned "needs canonicalization" when
    // every user is a GEP, so this cast cannot fail.  The iterator is bumped
    // before any mutation because the GEP may be erased below.
    GetElementPtrInst *GEPI = cast<GetElementPtrInst>(*UI++);
    // Skip the leading zero index to reach the type the second index selects.
    gep_type_iterator I = gep_type_begin(GEPI);
    ++I;

    if (const ArrayType *AT = dyn_cast<ArrayType>(*I)) {
      uint64_t NumElements = AT->getNumElements();

      // Only a non-constant index needs fixing; the safety check restricted
      // this case to arrays of exactly one or two elements.
      if (!isa<ConstantInt>(I.getOperand())) {
        if (NumElements == 1) {
          // A valid index into a one-element array can only be zero, so just
          // replace the variable index with the constant 0.
          GEPI->setOperand(2, Constant::getNullValue(Type::IntTy));
        } else {
          assert(NumElements == 2 && "Unhandled case!");
          // All users of the GEP must be loads.  At each use of the GEP, insert
          // two loads of the appropriate indexed GEP and select between them.
          Value *IsOne = BinaryOperator::createSetNE(I.getOperand(),
                             Constant::getNullValue(I.getOperand()->getType()),
                                                    "isone", GEPI);
          // Insert the new GEP instructions, which are properly indexed.
          std::vector<Value*> Indices(GEPI->op_begin()+1, GEPI->op_end());
          Indices[1] = Constant::getNullValue(Type::IntTy);
          Value *ZeroIdx = new GetElementPtrInst(GEPI->getOperand(0), Indices,
                                                 GEPI->getName()+".0", GEPI);
          Indices[1] = ConstantInt::get(Type::IntTy, 1);
          Value *OneIdx = new GetElementPtrInst(GEPI->getOperand(0), Indices,
                                                GEPI->getName()+".1", GEPI);
          // Replace all loads of the variable index GEP with loads from both
          // indexes and a select.
          while (!GEPI->use_empty()) {
            LoadInst *LI = cast<LoadInst>(GEPI->use_back());
            Value *Zero = new LoadInst(ZeroIdx, LI->getName()+".0", LI);
            Value *One = new LoadInst(OneIdx , LI->getName()+".1", LI);
            Value *R = new SelectInst(IsOne, One, Zero, LI->getName(), LI);
            LI->replaceAllUsesWith(R);
            LI->eraseFromParent();
          }
          // The variable-index GEP now has no users; remove it.
          GEPI->eraseFromParent();
        }
      }
    }
  }
}