//===- TailRecursionElimination.cpp - Eliminate Tail Calls ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file transforms calls of the current function (self recursion) followed
// by a return instruction with a branch to the entry of the function, creating
// a loop.  This pass also implements the following extensions to the basic
// algorithm:
//
//  1. Trivial instructions between the call and return do not prevent the
//     transformation from taking place, though currently the analysis cannot
//     support moving any really useful instructions (only dead ones).
//  2. This pass transforms functions that are prevented from being tail
//     recursive by an associative and commutative expression to use an
//     accumulator variable, thus compiling the typical naive factorial or
//     'fib' implementation into efficient code (see the illustration below).
//  3. TRE is performed if the function returns void, if the return
//     returns the result returned by the call, or if the function returns a
//     run-time constant on all exits from the function.  It is possible, though
//     unlikely, that the return returns something else (like constant 0), and
//     can still be TRE'd.  It can be TRE'd if ALL OTHER return instructions in
//     the function return the exact same value.
//  4. If the pass can prove that callees do not access their caller's stack
//     frame, they are marked as eligible for tail call elimination (by the
//     code generator).
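//
//     As a rough illustration of extension #2 (C-like pseudocode only, not
//     part of the pass itself): a naive factorial such as
//
//         int fact(int n) { return n == 0 ? 1 : n * fact(n - 1); }
//
//     is not tail recursive because of the multiply after the recursive call.
//     Seeding an accumulator with the value returned by the base case (1) lets
//     the pass turn the recursion into a loop that behaves roughly like:
//
//         int fact(int n) {
//           int accumulator = 1;
//           for (;;) {
//             if (n == 0) return accumulator;
//             accumulator = n * accumulator;
//             n = n - 1;
//           }
//         }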
//
// There are several improvements that could be made:
//
//  1. If the function has any alloca instructions, these instructions will be
//     moved out of the entry block of the function, causing them to be
//     evaluated each time through the tail recursion.  Safely keeping allocas
//     in the entry block requires analysis to prove that the tail-called
//     function does not read or write the stack object.
//  2. Tail recursion is only performed if the call immediately precedes the
//     return instruction.  It's possible that there could be a jump between
//     the call and the return.
//  3. There can be intervening operations between the call and the return that
//     prevent the TRE from occurring.  For example, there could be GEP's and
//     stores to memory that will not be read or written by the call.  This
//     requires some substantial analysis (such as with DSA) to prove safe to
//     move ahead of the call, but doing so could allow many more TREs to be
//     performed, for example in TreeAdd/TreeAlloc from the treeadd benchmark.
//  4. The algorithm we use to detect whether callees access their callers'
//     stack frames is very primitive.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/InlineCost.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/Loads.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
using namespace llvm;

#define DEBUG_TYPE "tailcallelim"

STATISTIC(NumEliminated, "Number of tail calls removed");
STATISTIC(NumRetDuped,   "Number of returns duplicated");
STATISTIC(NumAccumAdded, "Number of accumulators introduced");

namespace {
  struct TailCallElim : public FunctionPass {
    const TargetTransformInfo *TTI;

    static char ID; // Pass identification, replacement for typeid
    TailCallElim() : FunctionPass(ID) {
      initializeTailCallElimPass(*PassRegistry::getPassRegistry());
    }

    void getAnalysisUsage(AnalysisUsage &AU) const override;

    bool runOnFunction(Function &F) override;

  private:
    bool runTRE(Function &F);
    bool markTails(Function &F, bool &AllCallsAreTailCalls);

    CallInst *FindTRECandidate(Instruction *I,
                               bool CannotTailCallElimCallsMarkedTail);
    bool EliminateRecursiveTailCall(CallInst *CI, ReturnInst *Ret,
                                    BasicBlock *&OldEntry,
                                    bool &TailCallsAreMarkedTail,
                                    SmallVectorImpl<PHINode *> &ArgumentPHIs,
                                    bool CannotTailCallElimCallsMarkedTail);
    bool FoldReturnAndProcessPred(BasicBlock *BB,
                                  ReturnInst *Ret, BasicBlock *&OldEntry,
                                  bool &TailCallsAreMarkedTail,
                                  SmallVectorImpl<PHINode *> &ArgumentPHIs,
                                  bool CannotTailCallElimCallsMarkedTail);
    bool ProcessReturningBlock(ReturnInst *RI, BasicBlock *&OldEntry,
                               bool &TailCallsAreMarkedTail,
                               SmallVectorImpl<PHINode *> &ArgumentPHIs,
                               bool CannotTailCallElimCallsMarkedTail);
    bool CanMoveAboveCall(Instruction *I, CallInst *CI);
    Value *CanTransformAccumulatorRecursion(Instruction *I, CallInst *CI);
  };
}

char TailCallElim::ID = 0;
INITIALIZE_PASS_BEGIN(TailCallElim, "tailcallelim",
                      "Tail Call Elimination", false, false)
INITIALIZE_AG_DEPENDENCY(TargetTransformInfo)
INITIALIZE_PASS_END(TailCallElim, "tailcallelim",
                    "Tail Call Elimination", false, false)

// Public interface to the TailCallElimination pass
FunctionPass *llvm::createTailCallEliminationPass() {
  return new TailCallElim();
}

void TailCallElim::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<TargetTransformInfo>();
}

/// \brief Scan the specified function for alloca instructions.
/// If it contains any dynamic allocas, returns false.
static bool CanTRE(Function &F) {
  // Because of PR962, we don't TRE dynamic allocas.
  for (auto &BB : F) {
    for (auto &I : BB) {
      if (AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
        if (!AI->isStaticAlloca())
          return false;
      }
    }
  }

  return true;
}

bool TailCallElim::runOnFunction(Function &F) {
  if (skipOptnoneFunction(F))
    return false;

  bool AllCallsAreTailCalls = false;
  bool Modified = markTails(F, AllCallsAreTailCalls);
  if (AllCallsAreTailCalls)
    Modified |= runTRE(F);
  return Modified;
}

namespace {
struct AllocaDerivedValueTracker {
  // Start at a root value and walk its use-def chain to mark calls that use the
  // value or a derived value in AllocaUsers, and places where it may escape in
  // EscapePoints.
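  //
  // For illustration only (hypothetical IR; @use and @global are made-up
  // names, not taken from any real test):
  //
  //   %buf = alloca [16 x i8]
  //   %p   = getelementptr [16 x i8]* %buf, i64 0, i64 4
  //   call void @use(i8* nocapture %p)
  //   store i8* %p, i8** @global
  //
  // The getelementptr produces a derived value that the walk keeps following,
  // the nocapture call is recorded in AllocaUsers (but is not an escape), and
  // the store of %p itself lands in EscapePoints.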
  void walk(Value *Root) {
    SmallVector<Use *, 32> Worklist;
    SmallPtrSet<Use *, 32> Visited;

    auto AddUsesToWorklist = [&](Value *V) {
      for (auto &U : V->uses()) {
        if (!Visited.insert(&U))
          continue;
        Worklist.push_back(&U);
      }
    };

    AddUsesToWorklist(Root);

    while (!Worklist.empty()) {
      Use *U = Worklist.pop_back_val();
      Instruction *I = cast<Instruction>(U->getUser());

      switch (I->getOpcode()) {
      case Instruction::Call:
      case Instruction::Invoke: {
        CallSite CS(I);
        bool IsNocapture = !CS.isCallee(U) &&
                           CS.doesNotCapture(CS.getArgumentNo(U));
        callUsesLocalStack(CS, IsNocapture);
        if (IsNocapture) {
          // If the alloca-derived argument is passed in as nocapture, then it
          // can't propagate to the call's return. That would be capturing.
          continue;
        }
        break;
      }
      case Instruction::Load: {
        // The result of a load is not alloca-derived (unless an alloca has
        // otherwise escaped, but this is a local analysis).
        continue;
      }
      case Instruction::Store: {
        if (U->getOperandNo() == 0)
          EscapePoints.insert(I);
        continue;  // Stores have no users to analyze.
      }
      case Instruction::BitCast:
      case Instruction::GetElementPtr:
      case Instruction::PHI:
      case Instruction::Select:
      case Instruction::AddrSpaceCast:
        break;
      default:
        EscapePoints.insert(I);
        break;
      }

      AddUsesToWorklist(I);
    }
  }

  void callUsesLocalStack(CallSite CS, bool IsNocapture) {
    // Add it to the list of alloca users. If it's already there, skip further
    // processing.
    if (!AllocaUsers.insert(CS.getInstruction()))
      return;

    // If it's nocapture then it can't capture the alloca.
    if (IsNocapture)
      return;

    // If it can write to memory, it can leak the alloca value.
    if (!CS.onlyReadsMemory())
      EscapePoints.insert(CS.getInstruction());
  }

  SmallPtrSet<Instruction *, 32> AllocaUsers;
  SmallPtrSet<Instruction *, 32> EscapePoints;
};
}

bool TailCallElim::markTails(Function &F, bool &AllCallsAreTailCalls) {
  if (F.callsFunctionThatReturnsTwice())
    return false;
  AllCallsAreTailCalls = true;

  // The local stack holds all alloca instructions and all byval arguments.
  AllocaDerivedValueTracker Tracker;
  for (Argument &Arg : F.args()) {
    if (Arg.hasByValAttr())
      Tracker.walk(&Arg);
  }
  for (auto &BB : F) {
    for (auto &I : BB)
      if (AllocaInst *AI = dyn_cast<AllocaInst>(&I))
        Tracker.walk(AI);
  }

  bool Modified = false;

  // Track whether a block is reachable after an alloca has escaped. Blocks that
  // contain the escaping instruction will be marked as being visited without an
  // escaped alloca, since that is how the block began.
  enum VisitType {
    UNVISITED,
    UNESCAPED,
    ESCAPED
  };
  DenseMap<BasicBlock *, VisitType> Visited;

  // We propagate the fact that an alloca has escaped from block to successor.
  // Visit the blocks that are propagating the escapedness first. To do this, we
  // maintain two worklists.
  SmallVector<BasicBlock *, 32> WorklistUnescaped, WorklistEscaped;

  // We may enter a block and visit it thinking that no alloca has escaped yet,
  // then see an escape point and go back around a loop edge and come back to
  // the same block twice. Because of this, we defer setting tail on calls when
  // we first encounter them in a block. Every entry in this list does not
  // statically use an alloca via use-def chain analysis, but may find an alloca
  // through other means if the block turns out to be reachable after an escape
  // point.
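  //
  // For example (hypothetical CFG; @f, %alloca.ptr and %cond are made-up
  // names): in a loop block such as
  //
  //   loop:                                   ; preds = %entry, %loop
  //     call void @f()                        ; encountered before the escape
  //     store i8* %alloca.ptr, i8** @global   ; escape point
  //     br i1 %cond, label %loop, label %exit
  //
  // the call to @f() looks safe on the first visit, but the back edge makes
  // %loop reachable after the escape, so the block ends up marked ESCAPED and
  // the deferred call is rejected in the final loop below.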
  SmallVector<CallInst *, 32> DeferredTails;

  BasicBlock *BB = &F.getEntryBlock();
  VisitType Escaped = UNESCAPED;
  do {
    for (auto &I : *BB) {
      if (Tracker.EscapePoints.count(&I))
        Escaped = ESCAPED;

      CallInst *CI = dyn_cast<CallInst>(&I);
      if (!CI || CI->isTailCall())
        continue;

      if (CI->doesNotAccessMemory()) {
        // A call to a readnone function whose arguments are all things computed
        // outside this function can be marked tail. Even if you stored the
        // alloca address into a global, a readnone function can't load the
        // global anyhow.
        //
        // Note that this runs whether we know an alloca has escaped or not. If
        // it has, then we can't trust Tracker.AllocaUsers to be accurate.
        bool SafeToTail = true;
        for (auto &Arg : CI->arg_operands()) {
          // Look at the argument value itself (not the Use's user): it must be
          // a constant or a non-byval argument of the current function.
          Value *V = Arg.get();
          if (isa<Constant>(V))
            continue;
          if (Argument *A = dyn_cast<Argument>(V))
            if (!A->hasByValAttr())
              continue;
          SafeToTail = false;
          break;
        }
        if (SafeToTail) {
          emitOptimizationRemark(
              F.getContext(), "tailcallelim", F, CI->getDebugLoc(),
              "marked this readnone call a tail call candidate");
          CI->setTailCall();
          Modified = true;
          continue;
        }
      }

      if (Escaped == UNESCAPED && !Tracker.AllocaUsers.count(CI)) {
        DeferredTails.push_back(CI);
      } else {
        AllCallsAreTailCalls = false;
      }
    }

    for (auto *SuccBB : make_range(succ_begin(BB), succ_end(BB))) {
      auto &State = Visited[SuccBB];
      if (State < Escaped) {
        State = Escaped;
        if (State == ESCAPED)
          WorklistEscaped.push_back(SuccBB);
        else
          WorklistUnescaped.push_back(SuccBB);
      }
    }

    if (!WorklistEscaped.empty()) {
      BB = WorklistEscaped.pop_back_val();
      Escaped = ESCAPED;
    } else {
      BB = nullptr;
      while (!WorklistUnescaped.empty()) {
        auto *NextBB = WorklistUnescaped.pop_back_val();
        if (Visited[NextBB] == UNESCAPED) {
          BB = NextBB;
          Escaped = UNESCAPED;
          break;
        }
      }
    }
  } while (BB);

  for (CallInst *CI : DeferredTails) {
    if (Visited[CI->getParent()] != ESCAPED) {
      // If the escape point was part way through the block, calls after the
      // escape point wouldn't have been put into DeferredTails.
      emitOptimizationRemark(F.getContext(), "tailcallelim", F,
                             CI->getDebugLoc(),
                             "marked this call a tail call candidate");
      CI->setTailCall();
      Modified = true;
    } else {
      AllCallsAreTailCalls = false;
    }
  }

  return Modified;
}

bool TailCallElim::runTRE(Function &F) {
  // If this function is a varargs function, we won't be able to PHI the args
  // right, so don't even try to convert it...
  if (F.getFunctionType()->isVarArg()) return false;

  TTI = &getAnalysis<TargetTransformInfo>();
  BasicBlock *OldEntry = nullptr;
  bool TailCallsAreMarkedTail = false;
  SmallVector<PHINode*, 8> ArgumentPHIs;
  bool MadeChange = false;

  // CanTRETailMarkedCall - If false, we cannot perform TRE on tail calls
  // marked with the 'tail' attribute, because doing so would cause the stack
  // size to increase (a real tail call would deallocate variable sized
  // allocas, but TRE doesn't).
  bool CanTRETailMarkedCall = CanTRE(F);

  // Change any tail recursive calls to loops.
  //
  // FIXME: The code generator produces really bad code when an 'escaping
  // alloca' is changed from being a static alloca to being a dynamic alloca.
  // Until this is resolved, disable this transformation if that would ever
  // happen.  This bug is PR962.
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB) {
    if (ReturnInst *Ret = dyn_cast<ReturnInst>(BB->getTerminator())) {
      bool Change = ProcessReturningBlock(Ret, OldEntry, TailCallsAreMarkedTail,
                                          ArgumentPHIs, !CanTRETailMarkedCall);
      if (!Change && BB->getFirstNonPHIOrDbg() == Ret)
        Change = FoldReturnAndProcessPred(BB, Ret, OldEntry,
                                          TailCallsAreMarkedTail, ArgumentPHIs,
                                          !CanTRETailMarkedCall);
      MadeChange |= Change;
    }
  }

  // If we eliminated any tail recursions, it's possible that we inserted some
  // silly PHI nodes which just merge an initial value (the incoming operand)
  // with themselves.  Check to see if we did and clean up our mess if so.  This
  // occurs when a function passes an argument straight through to its tail
  // call.
  for (unsigned i = 0, e = ArgumentPHIs.size(); i != e; ++i) {
    PHINode *PN = ArgumentPHIs[i];

    // If the PHI node simplifies (for example, because it merges the same
    // incoming value on every edge), replace it with that value.
    if (Value *PNV = SimplifyInstruction(PN)) {
      PN->replaceAllUsesWith(PNV);
      PN->eraseFromParent();
    }
  }

  return MadeChange;
}


/// CanMoveAboveCall - Return true if it is safe to move the specified
/// instruction from after the call to before the call, assuming that all
/// instructions between the call and this instruction are movable.
///
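// For example (illustrative IR only; @foo, @arr and %unrelated are made-up
// names): in a block such as
//
//   %old = call i32 @foo()
//   %p   = getelementptr i32* @arr, i64 4
//   store i32 0, i32* %unrelated
//   ret i32 %old
//
// the getelementptr can be hoisted above the call, the store cannot (it may
// write memory), and nothing that uses %old may be moved above the call.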
bool TailCallElim::CanMoveAboveCall(Instruction *I, CallInst *CI) {
  // FIXME: We can move load/store/call/free instructions above the call if the
  // call does not mod/ref the memory location being processed.
  if (I->mayHaveSideEffects())  // This also handles volatile loads.
    return false;

  if (LoadInst *L = dyn_cast<LoadInst>(I)) {
    // Loads may always be moved above calls that have no side effects.
    if (CI->mayHaveSideEffects()) {
      // Non-volatile loads may be moved above a call with side effects if it
      // does not write to memory and the load provably won't trap.
      // FIXME: Writes to memory only matter if they may alias the pointer
      // being loaded from.
      if (CI->mayWriteToMemory() ||
          !isSafeToLoadUnconditionally(L->getPointerOperand(), L,
                                       L->getAlignment()))
        return false;
    }
  }

  // Otherwise, if this is a side-effect free instruction, check to make sure
  // that it does not use the return value of the call.  If it doesn't use the
  // return value of the call, it must only use things that are defined before
  // the call, or movable instructions between the call and the instruction
  // itself.
  for (unsigned i = 0, e = I->getNumOperands(); i != e; ++i)
    if (I->getOperand(i) == CI)
      return false;
  return true;
}

// isDynamicConstant - Return true if the specified value is the same when the
// return would exit as it was when the initial iteration of the recursive
// function was executed.
//
// We currently handle static constants and arguments that are not modified as
// part of the recursion.
//
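// For example (illustrative C-like pseudocode; 'search' is a made-up name):
//
//   int search(int *arr, int key, int i) {
//     if (arr[i] == key) return key;
//     return search(arr, key, i + 1);
//   }
//
// The value returned by 'return key' is dynamically constant: 'key' is an
// argument that the recursive call passes through unchanged in the same
// position.  A literal such as 'return 0' would also qualify, but 'return i'
// would not, because the call passes 'i + 1' for that argument.
//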
static bool isDynamicConstant(Value *V, CallInst *CI, ReturnInst *RI) {
  if (isa<Constant>(V)) return true; // Static constants are always dyn consts

  // Check to see if this is an immutable argument, if so, the value
  // will be available to initialize the accumulator.
  if (Argument *Arg = dyn_cast<Argument>(V)) {
    // Figure out which argument number this is...
    unsigned ArgNo = 0;
    Function *F = CI->getParent()->getParent();
    for (Function::arg_iterator AI = F->arg_begin(); &*AI != Arg; ++AI)
      ++ArgNo;

    // If we are passing this argument into call as the corresponding
    // argument operand, then the argument is dynamically constant.
    // Otherwise, we cannot transform this function safely.
    if (CI->getArgOperand(ArgNo) == Arg)
      return true;
  }

  // Switch cases are always constant integers. If the value is being switched
  // on and the return is only reachable from one of its cases, it's
  // effectively constant.
  if (BasicBlock *UniquePred = RI->getParent()->getUniquePredecessor())
    if (SwitchInst *SI = dyn_cast<SwitchInst>(UniquePred->getTerminator()))
      if (SI->getCondition() == V)
        return SI->getDefaultDest() != RI->getParent();

  // Not a constant or immutable argument, we can't safely transform.
  return false;
}

// getCommonReturnValue - Check to see if the function containing the specified
// tail call consistently returns the same runtime-constant value at all exit
// points except for IgnoreRI. If so, return the returned value.
//
static Value *getCommonReturnValue(ReturnInst *IgnoreRI, CallInst *CI) {
  Function *F = CI->getParent()->getParent();
  Value *ReturnedValue = nullptr;

  for (Function::iterator BBI = F->begin(), E = F->end(); BBI != E; ++BBI) {
    ReturnInst *RI = dyn_cast<ReturnInst>(BBI->getTerminator());
    if (RI == nullptr || RI == IgnoreRI) continue;

    // We can only perform this transformation if the value returned is
    // evaluatable at the start of the initial invocation of the function,
    // instead of at the end of the evaluation.
    //
    Value *RetOp = RI->getOperand(0);
    if (!isDynamicConstant(RetOp, CI, RI))
      return nullptr;

    if (ReturnedValue && RetOp != ReturnedValue)
      return nullptr; // Cannot transform if differing values are returned.
    ReturnedValue = RetOp;
  }
  return ReturnedValue;
}

/// CanTransformAccumulatorRecursion - If the specified instruction can be
/// transformed using accumulator recursion elimination, return the constant
/// which is the start of the accumulator value.  Otherwise return null.
///
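/// For example (illustrative only): in the naive factorial sketched in the
/// file header, the 'mul' of %n with the recursive call's result is
/// associative and commutative, uses the call result exactly once, and feeds
/// a lone return, so this returns the value shared by the other returns (the
/// constant 1) to seed the accumulator.
///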
Value *TailCallElim::CanTransformAccumulatorRecursion(Instruction *I,
                                                      CallInst *CI) {
  if (!I->isAssociative() || !I->isCommutative()) return nullptr;
  assert(I->getNumOperands() == 2 &&
         "Associative/commutative operations should have 2 args!");

  // Exactly one operand should be the result of the call instruction.
  if ((I->getOperand(0) == CI && I->getOperand(1) == CI) ||
      (I->getOperand(0) != CI && I->getOperand(1) != CI))
    return nullptr;

  // The only user of this instruction we allow is a single return instruction.
  if (!I->hasOneUse() || !isa<ReturnInst>(I->user_back()))
    return nullptr;

  // Ok, now we have to check all of the other return instructions in this
  // function.  If they return non-constants or differing values, then we cannot
  // transform the function safely.
  return getCommonReturnValue(cast<ReturnInst>(I->user_back()), CI);
}

static Instruction *FirstNonDbg(BasicBlock::iterator I) {
  while (isa<DbgInfoIntrinsic>(I))
    ++I;
  return &*I;
}

CallInst*
TailCallElim::FindTRECandidate(Instruction *TI,
                               bool CannotTailCallElimCallsMarkedTail) {
  BasicBlock *BB = TI->getParent();
  Function *F = BB->getParent();

  if (&BB->front() == TI) // Make sure there is something before the terminator.
    return nullptr;

  // Scan backwards from the return, checking to see if there is a tail call in
  // this block.  If so, set CI to it.
  CallInst *CI = nullptr;
  BasicBlock::iterator BBI = TI;
  while (true) {
    CI = dyn_cast<CallInst>(BBI);
    if (CI && CI->getCalledFunction() == F)
      break;

    if (BBI == BB->begin())
      return nullptr;          // Didn't find a potential tail call.
    --BBI;
  }

  // If this call is marked as a tail call, and if there are dynamic allocas in
  // the function, we cannot perform this optimization.
  if (CI->isTailCall() && CannotTailCallElimCallsMarkedTail)
    return nullptr;

  // As a special case, detect code like this:
  //   double fabs(double f) { return __builtin_fabs(f); } // a 'fabs' call
  // and disable this xform in this case, because the code generator will
  // lower the call to fabs into inline code.
  if (BB == &F->getEntryBlock() &&
      FirstNonDbg(BB->front()) == CI &&
      FirstNonDbg(std::next(BB->begin())) == TI &&
      CI->getCalledFunction() &&
      !TTI->isLoweredToCall(CI->getCalledFunction())) {
    // A single-block function with just a call and a return. Check that
    // the arguments match.
    CallSite::arg_iterator I = CallSite(CI).arg_begin(),
                           E = CallSite(CI).arg_end();
    Function::arg_iterator FI = F->arg_begin(),
                           FE = F->arg_end();
    for (; I != E && FI != FE; ++I, ++FI)
      if (*I != &*FI) break;
    if (I == E && FI == FE)
      return nullptr;
  }

  return CI;
}

bool TailCallElim::EliminateRecursiveTailCall(CallInst *CI, ReturnInst *Ret,
                                       BasicBlock *&OldEntry,
                                       bool &TailCallsAreMarkedTail,
                                       SmallVectorImpl<PHINode *> &ArgumentPHIs,
                                       bool CannotTailCallElimCallsMarkedTail) {
  // If we are introducing accumulator recursion to eliminate operations after
  // the call instruction that are both associative and commutative, the initial
  // value for the accumulator is placed in this variable.  If this value is set
  // then we actually perform accumulator recursion elimination instead of
  // simple tail recursion elimination.  If the operation is an LLVM instruction
  // (eg: "add") then it is recorded in AccumulatorRecursionInstr.  If not, then
  // we are handling the case when the return instruction returns a constant C
  // which is different to the constant returned by other return instructions
  // (which is recorded in AccumulatorRecursionEliminationInitVal).  This is a
  // special case of accumulator recursion, the operation being "return C".
  Value *AccumulatorRecursionEliminationInitVal = nullptr;
  Instruction *AccumulatorRecursionInstr = nullptr;

  // Ok, we found a potential tail call.  We can currently only transform the
  // tail call if all of the instructions between the call and the return are
  // movable to above the call itself, leaving the call next to the return.
  // Check that this is the case now.
  BasicBlock::iterator BBI = CI;
  for (++BBI; &*BBI != Ret; ++BBI) {
    if (CanMoveAboveCall(BBI, CI)) continue;

    // If we can't move the instruction above the call, it might be because it
    // is an associative and commutative operation that could be transformed
    // using accumulator recursion elimination.  Check to see if this is the
    // case, and if so, remember the initial accumulator value for later.
    if ((AccumulatorRecursionEliminationInitVal =
             CanTransformAccumulatorRecursion(BBI, CI))) {
      // Yes, this is accumulator recursion.  Remember which instruction
      // accumulates.
      AccumulatorRecursionInstr = BBI;
    } else {
      return false;  // Otherwise, we cannot eliminate the tail recursion!
    }
  }

  // We can only transform call/return pairs that either ignore the return value
  // of the call and return void, ignore the value of the call and return a
  // constant, return the value returned by the tail call, or that are being
  // eliminated via the accumulator recursion variable.
  if (Ret->getNumOperands() == 1 && Ret->getReturnValue() != CI &&
      !isa<UndefValue>(Ret->getReturnValue()) &&
      AccumulatorRecursionEliminationInitVal == nullptr &&
      !getCommonReturnValue(nullptr, CI)) {
    // One case remains that we are able to handle: the current return
    // instruction returns a constant, and all other return instructions
    // return a different constant.
    if (!isDynamicConstant(Ret->getReturnValue(), CI, Ret))
      return false; // Current return instruction does not return a constant.
    // Check that all other return instructions return a common constant.  If
    // so, record it in AccumulatorRecursionEliminationInitVal.
    AccumulatorRecursionEliminationInitVal = getCommonReturnValue(Ret, CI);
    if (!AccumulatorRecursionEliminationInitVal)
      return false;
  }

  BasicBlock *BB = Ret->getParent();
  Function *F = BB->getParent();

  emitOptimizationRemark(F->getContext(), "tailcallelim", *F, CI->getDebugLoc(),
                         "transforming tail recursion to loop");

  // OK! We can transform this tail call.  If this is the first one found,
  // create the new entry block, allowing us to branch back to the old entry.
  if (!OldEntry) {
    OldEntry = &F->getEntryBlock();
    BasicBlock *NewEntry = BasicBlock::Create(F->getContext(), "", F, OldEntry);
    NewEntry->takeName(OldEntry);
    OldEntry->setName("tailrecurse");
    BranchInst::Create(OldEntry, NewEntry);

    // If this tail call is marked 'tail' and if there are any allocas in the
    // entry block, move them up to the new entry block.
    TailCallsAreMarkedTail = CI->isTailCall();
    if (TailCallsAreMarkedTail)
      // Move all fixed sized allocas from OldEntry to NewEntry.
      for (BasicBlock::iterator OEBI = OldEntry->begin(), E = OldEntry->end(),
             NEBI = NewEntry->begin(); OEBI != E; )
        if (AllocaInst *AI = dyn_cast<AllocaInst>(OEBI++))
          if (isa<ConstantInt>(AI->getArraySize()))
            AI->moveBefore(NEBI);

    // Now that we have created a new block, which jumps to the entry
    // block, insert a PHI node for each argument of the function.
    // For now, we initialize each PHI to only have the real arguments
    // which are passed in.
    Instruction *InsertPos = OldEntry->begin();
    for (Function::arg_iterator I = F->arg_begin(), E = F->arg_end();
         I != E; ++I) {
      PHINode *PN = PHINode::Create(I->getType(), 2,
                                    I->getName() + ".tr", InsertPos);
      I->replaceAllUsesWith(PN); // Everyone use the PHI node now!
      PN->addIncoming(I, NewEntry);
      ArgumentPHIs.push_back(PN);
    }
  }

  // If this function has self recursive calls in the tail position where some
  // are marked tail and some are not, only transform one flavor or another.  We
  // have to choose whether we move allocas in the entry block to the new entry
  // block or not, so we can't make a good choice for both.  NOTE: We could do
  // slightly better here in the case that the function has no entry block
  // allocas.
  if (TailCallsAreMarkedTail && !CI->isTailCall())
    return false;

  // Ok, now that we know we have a pseudo-entry block WITH all of the
  // required PHI nodes, add entries into the PHI node for the actual
  // parameters passed into the tail-recursive call.
  for (unsigned i = 0, e = CI->getNumArgOperands(); i != e; ++i)
    ArgumentPHIs[i]->addIncoming(CI->getArgOperand(i), BB);

  // If we are introducing an accumulator variable to eliminate the recursion,
  // do so now.  Note that we _know_ that no subsequent tail recursion
  // eliminations will happen on this function because of the way the
  // accumulator recursion predicate is set up.
  //
  if (AccumulatorRecursionEliminationInitVal) {
    Instruction *AccRecInstr = AccumulatorRecursionInstr;
    // Start by inserting a new PHI node for the accumulator.
    pred_iterator PB = pred_begin(OldEntry), PE = pred_end(OldEntry);
    PHINode *AccPN =
        PHINode::Create(AccumulatorRecursionEliminationInitVal->getType(),
                        std::distance(PB, PE) + 1,
                        "accumulator.tr", OldEntry->begin());

    // Loop over all of the predecessors of the tail recursion block.  For the
    // real entry into the function we seed the PHI with the initial value,
    // computed earlier.  For any other existing branches to this block (due to
    // other tail recursions eliminated) the accumulator is not modified.
    // Because we haven't added the branch in the current block to OldEntry yet,
    // it will not show up as a predecessor.
    for (pred_iterator PI = PB; PI != PE; ++PI) {
      BasicBlock *P = *PI;
      if (P == &F->getEntryBlock())
        AccPN->addIncoming(AccumulatorRecursionEliminationInitVal, P);
      else
        AccPN->addIncoming(AccPN, P);
    }

    if (AccRecInstr) {
      // Add an incoming argument for the current block, which is computed by
      // our associative and commutative accumulator instruction.
      AccPN->addIncoming(AccRecInstr, BB);

      // Next, rewrite the accumulator recursion instruction so that it does not
      // use the result of the call anymore, instead, use the PHI node we just
      // inserted.
      AccRecInstr->setOperand(AccRecInstr->getOperand(0) != CI, AccPN);
    } else {
      // Add an incoming argument for the current block, which is just the
      // constant returned by the current return instruction.
      AccPN->addIncoming(Ret->getReturnValue(), BB);
    }

    // Finally, rewrite any return instructions in the program to return the PHI
    // node instead of the "initval" that they do currently.  This loop will
    // actually rewrite the return value we are destroying, but that's ok.
    for (Function::iterator BBI = F->begin(), E = F->end(); BBI != E; ++BBI)
      if (ReturnInst *RI = dyn_cast<ReturnInst>(BBI->getTerminator()))
        RI->setOperand(0, AccPN);
    ++NumAccumAdded;
  }

  // Now that all of the PHI nodes are in place, remove the call and
  // ret instructions, replacing them with an unconditional branch.
  BranchInst *NewBI = BranchInst::Create(OldEntry, Ret);
  NewBI->setDebugLoc(CI->getDebugLoc());

  BB->getInstList().erase(Ret);  // Remove return.
  BB->getInstList().erase(CI);   // Remove call.
  ++NumEliminated;
  return true;
}

bool TailCallElim::FoldReturnAndProcessPred(BasicBlock *BB,
                                       ReturnInst *Ret, BasicBlock *&OldEntry,
                                       bool &TailCallsAreMarkedTail,
                                       SmallVectorImpl<PHINode *> &ArgumentPHIs,
                                       bool CannotTailCallElimCallsMarkedTail) {
  bool Change = false;

  // If the return block contains nothing but the return and PHI's,
  // there might be an opportunity to duplicate the return in its
  // predecessors and perform TRE there.  Look for predecessors that end
  // in unconditional branch and recursive call(s).
  SmallVector<BranchInst*, 8> UncondBranchPreds;
  for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI) {
    BasicBlock *Pred = *PI;
    TerminatorInst *PTI = Pred->getTerminator();
    if (BranchInst *BI = dyn_cast<BranchInst>(PTI))
      if (BI->isUnconditional())
        UncondBranchPreds.push_back(BI);
  }

  while (!UncondBranchPreds.empty()) {
    BranchInst *BI = UncondBranchPreds.pop_back_val();
    BasicBlock *Pred = BI->getParent();
    if (CallInst *CI = FindTRECandidate(BI, CannotTailCallElimCallsMarkedTail)){
      DEBUG(dbgs() << "FOLDING: " << *BB
            << "INTO UNCOND BRANCH PRED: " << *Pred);
      EliminateRecursiveTailCall(CI, FoldReturnIntoUncondBranch(Ret, BB, Pred),
                                 OldEntry, TailCallsAreMarkedTail, ArgumentPHIs,
                                 CannotTailCallElimCallsMarkedTail);
      ++NumRetDuped;
      Change = true;
    }
  }

  return Change;
}

bool
TailCallElim::ProcessReturningBlock(ReturnInst *Ret, BasicBlock *&OldEntry,
                                    bool &TailCallsAreMarkedTail,
                                    SmallVectorImpl<PHINode *> &ArgumentPHIs,
                                    bool CannotTailCallElimCallsMarkedTail) {
  CallInst *CI = FindTRECandidate(Ret, CannotTailCallElimCallsMarkedTail);
  if (!CI)
    return false;

  return EliminateRecursiveTailCall(CI, Ret, OldEntry, TailCallsAreMarkedTail,
                                    ArgumentPHIs,
                                    CannotTailCallElimCallsMarkedTail);
}