//===- TailRecursionElimination.cpp - Eliminate Tail Calls ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file transforms calls of the current function (self recursion) followed
// by a return instruction with a branch to the entry of the function, creating
// a loop. This pass also implements the following extensions to the basic
// algorithm:
//
// 1. Trivial instructions between the call and return do not prevent the
//    transformation from taking place, though currently the analysis cannot
//    support moving any really useful instructions (only dead ones).
// 2. This pass transforms functions that are prevented from being tail
//    recursive by an associative and commutative expression to use an
//    accumulator variable, thus compiling the typical naive factorial or
//    'fib' implementation into efficient code.
// 3. TRE is performed if the function returns void, if the return
//    returns the result returned by the call, or if the function returns a
//    run-time constant on all exits from the function. It is possible, though
//    unlikely, that the return returns something else (like constant 0), and
//    can still be TRE'd. It can be TRE'd if ALL OTHER return instructions in
//    the function return the exact same value.
// 4. If it can prove that callees do not access their caller stack frame,
//    they are marked as eligible for tail call elimination (by the code
//    generator).
//
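// As a rough illustration of extension #2 (an example sketch added to this
// header, not taken from the original text), a naive factorial such as
//
//   static int factorial(int n) {
//     if (n <= 1) return 1;
//     return n * factorial(n - 1); // the multiply happens after the call
//   }
//
// can still be turned into a loop: the multiply is associative and
// commutative, so the pass carries the running product in an accumulator PHI
// node and branches back to the entry block instead of recursing.
//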
// There are several improvements that could be made:
//
// 1. If the function has any alloca instructions, these instructions will be
//    moved out of the entry block of the function, causing them to be
//    evaluated each time through the tail recursion. Safely keeping allocas
//    in the entry block requires analysis to prove that the tail-called
//    function does not read or write the stack object.
// 2. Tail recursion is only performed if the call immediately precedes the
//    return instruction. It's possible that there could be a jump between
//    the call and the return.
// 3. There can be intervening operations between the call and the return that
//    prevent the TRE from occurring. For example, there could be GEP's and
//    stores to memory that will not be read or written by the call. This
//    requires some substantial analysis (such as with DSA) to prove safe to
//    move ahead of the call, but doing so could allow many more TREs to be
//    performed, for example in TreeAdd/TreeAlloc from the treeadd benchmark.
// 4. The algorithm we use to detect if callees access their caller stack
//    frames is very primitive.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Scalar/TailRecursionElimination.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/Analysis/DomTreeUpdater.h"
#include "llvm/Analysis/GlobalsModRef.h"
#include "llvm/Analysis/InlineCost.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/Analysis/Loads.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/Analysis/PostDominators.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
using namespace llvm;

#define DEBUG_TYPE "tailcallelim"

STATISTIC(NumEliminated, "Number of tail calls removed");
STATISTIC(NumRetDuped, "Number of returns duplicated");
STATISTIC(NumAccumAdded, "Number of accumulators introduced");

/// Scan the specified function for alloca instructions.
/// If it contains any dynamic allocas, returns false.
static bool canTRE(Function &F) {
  // Because of PR962, we don't TRE dynamic allocas.
  return llvm::all_of(instructions(F), [](Instruction &I) {
    auto *AI = dyn_cast<AllocaInst>(&I);
    return !AI || AI->isStaticAlloca();
  });
}

namespace {
struct AllocaDerivedValueTracker {
  // Start at a root value and walk its use-def chain to mark calls that use the
  // value or a derived value in AllocaUsers, and places where it may escape in
  // EscapePoints.
  void walk(Value *Root) {
    SmallVector<Use *, 32> Worklist;
    SmallPtrSet<Use *, 32> Visited;

    auto AddUsesToWorklist = [&](Value *V) {
      for (auto &U : V->uses()) {
        if (!Visited.insert(&U).second)
          continue;
        Worklist.push_back(&U);
      }
    };

    AddUsesToWorklist(Root);

    while (!Worklist.empty()) {
      Use *U = Worklist.pop_back_val();
      Instruction *I = cast<Instruction>(U->getUser());

      switch (I->getOpcode()) {
      case Instruction::Call:
      case Instruction::Invoke: {
        CallSite CS(I);
        // If the alloca-derived argument is passed byval it is not an escape
        // point, or a use of an alloca. Calling with byval copies the contents
        // of the alloca into argument registers or stack slots, which exist
        // beyond the lifetime of the current frame.
        if (CS.isArgOperand(U) && CS.isByValArgument(CS.getArgumentNo(U)))
          continue;
        bool IsNocapture =
            CS.isDataOperand(U) && CS.doesNotCapture(CS.getDataOperandNo(U));
        callUsesLocalStack(CS, IsNocapture);
        if (IsNocapture) {
          // If the alloca-derived argument is passed in as nocapture, then it
          // can't propagate to the call's return. That would be capturing.
          continue;
        }
        break;
      }
      case Instruction::Load: {
        // The result of a load is not alloca-derived (unless an alloca has
        // otherwise escaped, but this is a local analysis).
        continue;
      }
      case Instruction::Store: {
        if (U->getOperandNo() == 0)
          EscapePoints.insert(I);
        continue; // Stores have no users to analyze.
      }
      case Instruction::BitCast:
      case Instruction::GetElementPtr:
      case Instruction::PHI:
      case Instruction::Select:
      case Instruction::AddrSpaceCast:
        break;
      default:
        EscapePoints.insert(I);
        break;
      }

      AddUsesToWorklist(I);
    }
  }

  void callUsesLocalStack(CallSite CS, bool IsNocapture) {
    // Add it to the list of alloca users.
    AllocaUsers.insert(CS.getInstruction());

    // If it's nocapture then it can't capture this alloca.
    if (IsNocapture)
      return;

    // If it can write to memory, it can leak the alloca value.
    if (!CS.onlyReadsMemory())
      EscapePoints.insert(CS.getInstruction());
  }

  SmallPtrSet<Instruction *, 32> AllocaUsers;
  SmallPtrSet<Instruction *, 32> EscapePoints;
};
}

static bool markTails(Function &F, bool &AllCallsAreTailCalls,
                      OptimizationRemarkEmitter *ORE) {
  if (F.callsFunctionThatReturnsTwice())
    return false;
  AllCallsAreTailCalls = true;

  // The local stack holds all alloca instructions and all byval arguments.
  AllocaDerivedValueTracker Tracker;
  for (Argument &Arg : F.args()) {
    if (Arg.hasByValAttr())
      Tracker.walk(&Arg);
  }
  for (auto &BB : F) {
    for (auto &I : BB)
      if (AllocaInst *AI = dyn_cast<AllocaInst>(&I))
        Tracker.walk(AI);
  }

  bool Modified = false;

  // Track whether a block is reachable after an alloca has escaped. Blocks that
  // contain the escaping instruction will be marked as being visited without an
  // escaped alloca, since that is how the block began.
  enum VisitType {
    UNVISITED,
    UNESCAPED,
    ESCAPED
  };
  DenseMap<BasicBlock *, VisitType> Visited;

  // We propagate the fact that an alloca has escaped from block to successor.
  // Visit the blocks that are propagating the escapedness first. To do this, we
  // maintain two worklists.
  SmallVector<BasicBlock *, 32> WorklistUnescaped, WorklistEscaped;

  // We may enter a block and visit it thinking that no alloca has escaped yet,
  // then see an escape point and go back around a loop edge and come back to
  // the same block twice. Because of this, we defer setting tail on calls when
  // we first encounter them in a block. Every entry in this list does not
  // statically use an alloca via use-def chain analysis, but may find an alloca
  // through other means if the block turns out to be reachable after an escape
  // point.
  SmallVector<CallInst *, 32> DeferredTails;

  BasicBlock *BB = &F.getEntryBlock();
  VisitType Escaped = UNESCAPED;
  do {
    for (auto &I : *BB) {
      if (Tracker.EscapePoints.count(&I))
        Escaped = ESCAPED;

      CallInst *CI = dyn_cast<CallInst>(&I);
      if (!CI || CI->isTailCall() || isa<DbgInfoIntrinsic>(&I))
        continue;

      bool IsNoTail = CI->isNoTailCall() || CI->hasOperandBundles();

      if (!IsNoTail && CI->doesNotAccessMemory()) {
        // A call to a readnone function whose arguments are all things computed
        // outside this function can be marked tail. Even if you stored the
        // alloca address into a global, a readnone function can't load the
        // global anyhow.
        //
        // Note that this runs whether we know an alloca has escaped or not. If
        // it has, then we can't trust Tracker.AllocaUsers to be accurate.
        bool SafeToTail = true;
        for (auto &Arg : CI->arg_operands()) {
          if (isa<Constant>(Arg.getUser()))
            continue;
          if (Argument *A = dyn_cast<Argument>(Arg.getUser()))
            if (!A->hasByValAttr())
              continue;
          SafeToTail = false;
          break;
        }
        if (SafeToTail) {
          using namespace ore;
          ORE->emit([&]() {
            return OptimizationRemark(DEBUG_TYPE, "tailcall-readnone", CI)
                   << "marked as tail call candidate (readnone)";
          });
          CI->setTailCall();
          Modified = true;
          continue;
        }
      }

      if (!IsNoTail && Escaped == UNESCAPED && !Tracker.AllocaUsers.count(CI)) {
        DeferredTails.push_back(CI);
      } else {
        AllCallsAreTailCalls = false;
      }
    }

    for (auto *SuccBB : make_range(succ_begin(BB), succ_end(BB))) {
      auto &State = Visited[SuccBB];
      if (State < Escaped) {
        State = Escaped;
        if (State == ESCAPED)
          WorklistEscaped.push_back(SuccBB);
        else
          WorklistUnescaped.push_back(SuccBB);
      }
    }

    if (!WorklistEscaped.empty()) {
      BB = WorklistEscaped.pop_back_val();
      Escaped = ESCAPED;
    } else {
      BB = nullptr;
      while (!WorklistUnescaped.empty()) {
        auto *NextBB = WorklistUnescaped.pop_back_val();
        if (Visited[NextBB] == UNESCAPED) {
          BB = NextBB;
          Escaped = UNESCAPED;
          break;
        }
      }
    }
  } while (BB);

  for (CallInst *CI : DeferredTails) {
    if (Visited[CI->getParent()] != ESCAPED) {
      // If the escape point was part way through the block, calls after the
      // escape point wouldn't have been put into DeferredTails.
      LLVM_DEBUG(dbgs() << "Marked as tail call candidate: " << *CI << "\n");
      CI->setTailCall();
      Modified = true;
    } else {
      AllCallsAreTailCalls = false;
    }
  }

  return Modified;
}

/// Return true if it is safe to move the specified
/// instruction from after the call to before the call, assuming that all
/// instructions between the call and this instruction are movable.
///
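/// As an illustrative note (added here, not in the original comment): in a
/// block like "foo(); y = a + b; return y" the add only uses values defined
/// before the call, so it may be hoisted above it, whereas an instruction that
/// uses the call's return value cannot be (unless it qualifies for the
/// separate accumulator-recursion handling).
///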
static bool canMoveAboveCall(Instruction *I, CallInst *CI, AliasAnalysis *AA) {
  // FIXME: We can move load/store/call/free instructions above the call if the
  // call does not mod/ref the memory location being processed.
  if (I->mayHaveSideEffects()) // This also handles volatile loads.
    return false;

  if (LoadInst *L = dyn_cast<LoadInst>(I)) {
    // Loads may always be moved above calls without side effects.
    if (CI->mayHaveSideEffects()) {
      // Non-volatile loads may be moved above a call with side effects if it
      // does not write to memory and the load provably won't trap.
      // Writes to memory only matter if they may alias the pointer
      // being loaded from.
      const DataLayout &DL = L->getModule()->getDataLayout();
      if (isModSet(AA->getModRefInfo(CI, MemoryLocation::get(L))) ||
          !isSafeToLoadUnconditionally(L->getPointerOperand(),
                                       L->getAlignment(), DL, L))
        return false;
    }
  }

  // Otherwise, if this is a side-effect free instruction, check to make sure
  // that it does not use the return value of the call. If it doesn't use the
  // return value of the call, it must only use things that are defined before
  // the call, or movable instructions between the call and the instruction
  // itself.
  return !is_contained(I->operands(), CI);
}

/// Return true if the specified value is the same when the return would exit
/// as it was when the initial iteration of the recursive function was executed.
///
/// We currently handle static constants and arguments that are not modified as
/// part of the recursion.
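///
/// For example (an illustrative sketch, not part of the original comment):
/// given
///   int f(int n, int k) { if (n == 0) return k; f(n - 1, k); return k; }
/// the value 'k' used by the final return is dynamically constant, because
/// every recursive call passes 'k' through unchanged in the same position.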
static bool isDynamicConstant(Value *V, CallInst *CI, ReturnInst *RI) {
  if (isa<Constant>(V)) return true; // Static constants are always dyn consts

  // Check to see if this is an immutable argument, if so, the value
  // will be available to initialize the accumulator.
  if (Argument *Arg = dyn_cast<Argument>(V)) {
    // Figure out which argument number this is...
    unsigned ArgNo = 0;
    Function *F = CI->getParent()->getParent();
    for (Function::arg_iterator AI = F->arg_begin(); &*AI != Arg; ++AI)
      ++ArgNo;

    // If we are passing this argument into call as the corresponding
    // argument operand, then the argument is dynamically constant.
    // Otherwise, we cannot transform this function safely.
    if (CI->getArgOperand(ArgNo) == Arg)
      return true;
  }

  // Switch cases are always constant integers. If the value is being switched
  // on and the return is only reachable from one of its cases, it's
  // effectively constant.
  if (BasicBlock *UniquePred = RI->getParent()->getUniquePredecessor())
    if (SwitchInst *SI = dyn_cast<SwitchInst>(UniquePred->getTerminator()))
      if (SI->getCondition() == V)
        return SI->getDefaultDest() != RI->getParent();

  // Not a constant or immutable argument, we can't safely transform.
  return false;
}

/// Check to see if the function containing the specified tail call consistently
/// returns the same runtime-constant value at all exit points except for
/// IgnoreRI. If so, return the returned value.
static Value *getCommonReturnValue(ReturnInst *IgnoreRI, CallInst *CI) {
  Function *F = CI->getParent()->getParent();
  Value *ReturnedValue = nullptr;

  for (BasicBlock &BBI : *F) {
    ReturnInst *RI = dyn_cast<ReturnInst>(BBI.getTerminator());
    if (RI == nullptr || RI == IgnoreRI) continue;

    // We can only perform this transformation if the value returned is
    // evaluatable at the start of the initial invocation of the function,
    // instead of at the end of the evaluation.
    //
    Value *RetOp = RI->getOperand(0);
    if (!isDynamicConstant(RetOp, CI, RI))
      return nullptr;

    if (ReturnedValue && RetOp != ReturnedValue)
      return nullptr; // Cannot transform if differing values are returned.
    ReturnedValue = RetOp;
  }
  return ReturnedValue;
}

/// If the specified instruction can be transformed using accumulator recursion
/// elimination, return the constant which is the start of the accumulator
/// value. Otherwise return null.
static Value *canTransformAccumulatorRecursion(Instruction *I, CallInst *CI) {
  if (!I->isAssociative() || !I->isCommutative()) return nullptr;
  assert(I->getNumOperands() == 2 &&
         "Associative/commutative operations should have 2 args!");

  // Exactly one operand should be the result of the call instruction.
  if ((I->getOperand(0) == CI && I->getOperand(1) == CI) ||
      (I->getOperand(0) != CI && I->getOperand(1) != CI))
    return nullptr;

  // The only user of this instruction we allow is a single return instruction.
  if (!I->hasOneUse() || !isa<ReturnInst>(I->user_back()))
    return nullptr;

  // Ok, now we have to check all of the other return instructions in this
  // function. If they return non-constants or differing values, then we cannot
  // transform the function safely.
  return getCommonReturnValue(cast<ReturnInst>(I->user_back()), CI);
}

static Instruction *firstNonDbg(BasicBlock::iterator I) {
  while (isa<DbgInfoIntrinsic>(I))
    ++I;
  return &*I;
}

static CallInst *findTRECandidate(Instruction *TI,
                                  bool CannotTailCallElimCallsMarkedTail,
                                  const TargetTransformInfo *TTI) {
  BasicBlock *BB = TI->getParent();
  Function *F = BB->getParent();

  if (&BB->front() == TI) // Make sure there is something before the terminator.
    return nullptr;

  // Scan backwards from the return, checking to see if there is a tail call in
  // this block. If so, set CI to it.
  CallInst *CI = nullptr;
  BasicBlock::iterator BBI(TI);
  while (true) {
    CI = dyn_cast<CallInst>(BBI);
    if (CI && CI->getCalledFunction() == F)
      break;

    if (BBI == BB->begin())
      return nullptr; // Didn't find a potential tail call.
    --BBI;
  }

  // If this call is marked as a tail call, and if there are dynamic allocas in
  // the function, we cannot perform this optimization.
  if (CI->isTailCall() && CannotTailCallElimCallsMarkedTail)
    return nullptr;

  // As a special case, detect code like this:
  //   double fabs(double f) { return __builtin_fabs(f); } // a 'fabs' call
  // and disable this xform in this case, because the code generator will
  // lower the call to fabs into inline code.
  if (BB == &F->getEntryBlock() &&
      firstNonDbg(BB->front().getIterator()) == CI &&
      firstNonDbg(std::next(BB->begin())) == TI && CI->getCalledFunction() &&
      !TTI->isLoweredToCall(CI->getCalledFunction())) {
    // A single-block function with just a call and a return. Check that
    // the arguments match.
    CallSite::arg_iterator I = CallSite(CI).arg_begin(),
                           E = CallSite(CI).arg_end();
    Function::arg_iterator FI = F->arg_begin(),
                           FE = F->arg_end();
    for (; I != E && FI != FE; ++I, ++FI)
      if (*I != &*FI) break;
    if (I == E && FI == FE)
      return nullptr;
  }

  return CI;
}

static bool eliminateRecursiveTailCall(
    CallInst *CI, ReturnInst *Ret, BasicBlock *&OldEntry,
    bool &TailCallsAreMarkedTail, SmallVectorImpl<PHINode *> &ArgumentPHIs,
    AliasAnalysis *AA, OptimizationRemarkEmitter *ORE, DomTreeUpdater &DTU) {
  // If we are introducing accumulator recursion to eliminate operations after
  // the call instruction that are both associative and commutative, the initial
  // value for the accumulator is placed in this variable. If this value is set
  // then we actually perform accumulator recursion elimination instead of
  // simple tail recursion elimination. If the operation is an LLVM instruction
  // (e.g. "add") then it is recorded in AccumulatorRecursionInstr. If not, then
  // we are handling the case when the return instruction returns a constant C
  // which is different from the constant returned by other return instructions
  // (which is recorded in AccumulatorRecursionEliminationInitVal). This is a
  // special case of accumulator recursion, the operation being "return C".
  Value *AccumulatorRecursionEliminationInitVal = nullptr;
  Instruction *AccumulatorRecursionInstr = nullptr;

  // Ok, we found a potential tail call. We can currently only transform the
  // tail call if all of the instructions between the call and the return are
  // movable to above the call itself, leaving the call next to the return.
  // Check that this is the case now.
  BasicBlock::iterator BBI(CI);
  for (++BBI; &*BBI != Ret; ++BBI) {
    if (canMoveAboveCall(&*BBI, CI, AA))
      continue;

    // If we can't move the instruction above the call, it might be because it
    // is an associative and commutative operation that could be transformed
    // using accumulator recursion elimination. Check to see if this is the
    // case, and if so, remember the initial accumulator value for later.
    if ((AccumulatorRecursionEliminationInitVal =
             canTransformAccumulatorRecursion(&*BBI, CI))) {
      // Yes, this is accumulator recursion. Remember which instruction
      // accumulates.
      AccumulatorRecursionInstr = &*BBI;
    } else {
      return false; // Otherwise, we cannot eliminate the tail recursion!
    }
  }

  // We can only transform call/return pairs that either ignore the return value
  // of the call and return void, ignore the value of the call and return a
  // constant, return the value returned by the tail call, or that are being
  // accumulator recursion variable eliminated.
  if (Ret->getNumOperands() == 1 && Ret->getReturnValue() != CI &&
      !isa<UndefValue>(Ret->getReturnValue()) &&
      AccumulatorRecursionEliminationInitVal == nullptr &&
      !getCommonReturnValue(nullptr, CI)) {
    // One case remains that we are able to handle: the current return
    // instruction returns a constant, and all other return instructions
    // return a different constant.
    if (!isDynamicConstant(Ret->getReturnValue(), CI, Ret))
      return false; // Current return instruction does not return a constant.
    // Check that all other return instructions return a common constant. If
    // so, record it in AccumulatorRecursionEliminationInitVal.
    AccumulatorRecursionEliminationInitVal = getCommonReturnValue(Ret, CI);
    if (!AccumulatorRecursionEliminationInitVal)
      return false;
  }

  BasicBlock *BB = Ret->getParent();
  Function *F = BB->getParent();

  using namespace ore;
  ORE->emit([&]() {
    return OptimizationRemark(DEBUG_TYPE, "tailcall-recursion", CI)
           << "transforming tail recursion into loop";
  });

  // OK! We can transform this tail call. If this is the first one found,
  // create the new entry block, allowing us to branch back to the old entry.
  if (!OldEntry) {
    OldEntry = &F->getEntryBlock();
    BasicBlock *NewEntry = BasicBlock::Create(F->getContext(), "", F, OldEntry);
    NewEntry->takeName(OldEntry);
    OldEntry->setName("tailrecurse");
    BranchInst *BI = BranchInst::Create(OldEntry, NewEntry);
    BI->setDebugLoc(CI->getDebugLoc());

    // If this tail call is marked 'tail' and if there are any allocas in the
    // entry block, move them up to the new entry block.
    TailCallsAreMarkedTail = CI->isTailCall();
    if (TailCallsAreMarkedTail)
      // Move all fixed sized allocas from OldEntry to NewEntry.
      for (BasicBlock::iterator OEBI = OldEntry->begin(), E = OldEntry->end(),
           NEBI = NewEntry->begin(); OEBI != E; )
        if (AllocaInst *AI = dyn_cast<AllocaInst>(OEBI++))
          if (isa<ConstantInt>(AI->getArraySize()))
            AI->moveBefore(&*NEBI);

    // Now that we have created a new block, which jumps to the entry
    // block, insert a PHI node for each argument of the function.
    // For now, we initialize each PHI to only have the real arguments
    // which are passed in.
    Instruction *InsertPos = &OldEntry->front();
    for (Function::arg_iterator I = F->arg_begin(), E = F->arg_end();
         I != E; ++I) {
      PHINode *PN = PHINode::Create(I->getType(), 2,
                                    I->getName() + ".tr", InsertPos);
      I->replaceAllUsesWith(PN); // Everyone use the PHI node now!
      PN->addIncoming(&*I, NewEntry);
      ArgumentPHIs.push_back(PN);
    }
    // The entry block was changed from OldEntry to NewEntry.
    // The forward DominatorTree needs to be recalculated when the EntryBB is
    // changed. In this corner-case we recalculate the entire tree.
    DTU.recalculate(*NewEntry->getParent());
  }

  // If this function has self recursive calls in the tail position where some
  // are marked tail and some are not, only transform one flavor or another. We
  // have to choose whether we move allocas in the entry block to the new entry
  // block or not, so we can't make a good choice for both. NOTE: We could do
  // slightly better here in the case that the function has no entry block
  // allocas.
  if (TailCallsAreMarkedTail && !CI->isTailCall())
    return false;

  // Ok, now that we know we have a pseudo-entry block WITH all of the
  // required PHI nodes, add entries into the PHI node for the actual
  // parameters passed into the tail-recursive call.
  for (unsigned i = 0, e = CI->getNumArgOperands(); i != e; ++i)
    ArgumentPHIs[i]->addIncoming(CI->getArgOperand(i), BB);

  // If we are introducing an accumulator variable to eliminate the recursion,
  // do so now. Note that we _know_ that no subsequent tail recursion
  // eliminations will happen on this function because of the way the
  // accumulator recursion predicate is set up.
  //
  if (AccumulatorRecursionEliminationInitVal) {
    Instruction *AccRecInstr = AccumulatorRecursionInstr;
    // Start by inserting a new PHI node for the accumulator.
    pred_iterator PB = pred_begin(OldEntry), PE = pred_end(OldEntry);
    PHINode *AccPN = PHINode::Create(
        AccumulatorRecursionEliminationInitVal->getType(),
        std::distance(PB, PE) + 1, "accumulator.tr", &OldEntry->front());

    // Loop over all of the predecessors of the tail recursion block. For the
    // real entry into the function we seed the PHI with the initial value,
    // computed earlier. For any other existing branches to this block (due to
    // other tail recursions eliminated) the accumulator is not modified.
    // Because we haven't added the branch in the current block to OldEntry yet,
    // it will not show up as a predecessor.
    for (pred_iterator PI = PB; PI != PE; ++PI) {
      BasicBlock *P = *PI;
      if (P == &F->getEntryBlock())
        AccPN->addIncoming(AccumulatorRecursionEliminationInitVal, P);
      else
        AccPN->addIncoming(AccPN, P);
    }

    if (AccRecInstr) {
      // Add an incoming argument for the current block, which is computed by
      // our associative and commutative accumulator instruction.
      AccPN->addIncoming(AccRecInstr, BB);

      // Next, rewrite the accumulator recursion instruction so that it does not
      // use the result of the call anymore, instead, use the PHI node we just
      // inserted.
      AccRecInstr->setOperand(AccRecInstr->getOperand(0) != CI, AccPN);
    } else {
      // Add an incoming argument for the current block, which is just the
      // constant returned by the current return instruction.
      AccPN->addIncoming(Ret->getReturnValue(), BB);
    }

    // Finally, rewrite any return instructions in the program to return the PHI
    // node instead of the "initval" that they do currently. This loop will
    // actually rewrite the return value we are destroying, but that's ok.
    for (BasicBlock &BBI : *F)
      if (ReturnInst *RI = dyn_cast<ReturnInst>(BBI.getTerminator()))
        RI->setOperand(0, AccPN);
    ++NumAccumAdded;
  }

  // Now that all of the PHI nodes are in place, remove the call and
  // ret instructions, replacing them with an unconditional branch.
  BranchInst *NewBI = BranchInst::Create(OldEntry, Ret);
  NewBI->setDebugLoc(CI->getDebugLoc());

  BB->getInstList().erase(Ret); // Remove return.
  BB->getInstList().erase(CI);  // Remove call.
  DTU.applyUpdates({{DominatorTree::Insert, BB, OldEntry}});
  ++NumEliminated;
  return true;
}

static bool foldReturnAndProcessPred(
    BasicBlock *BB, ReturnInst *Ret, BasicBlock *&OldEntry,
    bool &TailCallsAreMarkedTail, SmallVectorImpl<PHINode *> &ArgumentPHIs,
    bool CannotTailCallElimCallsMarkedTail, const TargetTransformInfo *TTI,
    AliasAnalysis *AA, OptimizationRemarkEmitter *ORE, DomTreeUpdater &DTU) {
  bool Change = false;

  // Make sure this block is a trivial return block.
  assert(BB->getFirstNonPHIOrDbg() == Ret &&
         "Trying to fold non-trivial return block");

  // If the return block contains nothing but the return and PHI's,
  // there might be an opportunity to duplicate the return in its
  // predecessors and perform TRE there. Look for predecessors that end
  // in unconditional branch and recursive call(s).
  SmallVector<BranchInst*, 8> UncondBranchPreds;
  for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI) {
    BasicBlock *Pred = *PI;
    Instruction *PTI = Pred->getTerminator();
    if (BranchInst *BI = dyn_cast<BranchInst>(PTI))
      if (BI->isUnconditional())
        UncondBranchPreds.push_back(BI);
  }

  while (!UncondBranchPreds.empty()) {
    BranchInst *BI = UncondBranchPreds.pop_back_val();
    BasicBlock *Pred = BI->getParent();
    if (CallInst *CI =
            findTRECandidate(BI, CannotTailCallElimCallsMarkedTail, TTI)) {
      LLVM_DEBUG(dbgs() << "FOLDING: " << *BB
                        << "INTO UNCOND BRANCH PRED: " << *Pred);
      ReturnInst *RI = FoldReturnIntoUncondBranch(Ret, BB, Pred, &DTU);

      // Cleanup: if all predecessors of BB have been eliminated by
      // FoldReturnIntoUncondBranch, delete it. It is important to empty it,
      // because the ret instruction in there is still using a value which
      // eliminateRecursiveTailCall will attempt to remove.
      if (!BB->hasAddressTaken() && pred_begin(BB) == pred_end(BB))
        DTU.deleteBB(BB);

      eliminateRecursiveTailCall(CI, RI, OldEntry, TailCallsAreMarkedTail,
                                 ArgumentPHIs, AA, ORE, DTU);
      ++NumRetDuped;
      Change = true;
    }
  }

  return Change;
}

static bool processReturningBlock(
    ReturnInst *Ret, BasicBlock *&OldEntry, bool &TailCallsAreMarkedTail,
    SmallVectorImpl<PHINode *> &ArgumentPHIs,
    bool CannotTailCallElimCallsMarkedTail, const TargetTransformInfo *TTI,
    AliasAnalysis *AA, OptimizationRemarkEmitter *ORE, DomTreeUpdater &DTU) {
  CallInst *CI = findTRECandidate(Ret, CannotTailCallElimCallsMarkedTail, TTI);
  if (!CI)
    return false;

  return eliminateRecursiveTailCall(CI, Ret, OldEntry, TailCallsAreMarkedTail,
                                    ArgumentPHIs, AA, ORE, DTU);
}
| Davide Italiano | 8738363 | 2016-06-02 23:02:44 +0000 | [diff] [blame] | 747 | |
| Xin Tong | b412831 | 2017-06-19 15:21:18 +0000 | [diff] [blame] | 748 | static bool eliminateTailRecursion(Function &F, const TargetTransformInfo *TTI, |
| Davide Italiano | 4b8c8ea | 2017-07-19 21:13:22 +0000 | [diff] [blame] | 749 | AliasAnalysis *AA, |
| Chijun Sima | 8b5de48 | 2018-08-04 08:13:47 +0000 | [diff] [blame] | 750 | OptimizationRemarkEmitter *ORE, |
| 751 | DomTreeUpdater &DTU) { |
| Sean Silva | 59fe82f | 2016-07-06 23:48:41 +0000 | [diff] [blame] | 752 | if (F.getFnAttribute("disable-tail-calls").getValueAsString() == "true") |
| 753 | return false; |
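| | // Illustrative IR sketch (assumed example, not taken from the source tree):
| | // the marker is a string function attribute, e.g.
| | //   define i32 @f(i32 %n) #0 { ... }
| | //   attributes #0 = { "disable-tail-calls"="true" }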
| 754 | |
| Davide Italiano | 8738363 | 2016-06-02 23:02:44 +0000 | [diff] [blame] | 755 | bool MadeChange = false; |
| 756 | bool AllCallsAreTailCalls = false; |
| Davide Italiano | 4b8c8ea | 2017-07-19 21:13:22 +0000 | [diff] [blame] | 757 | MadeChange |= markTails(F, AllCallsAreTailCalls, ORE); |
| Davide Italiano | 8738363 | 2016-06-02 23:02:44 +0000 | [diff] [blame] | 758 | if (!AllCallsAreTailCalls) |
| 759 | return MadeChange; |
| 760 | |
| 761 | // If this function is a varargs function, we won't be able to rethread the |
| 762 | // arguments through PHI nodes correctly, so don't even try to convert it. |
| 763 | if (F.getFunctionType()->isVarArg()) |
| 764 | return false; |
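| | // Illustrative (assumed) example: in a variadic definition such as
| | //   define i32 @sum(i32 %n, ...) { ... }
| | // the variadic arguments cannot be rethreaded through entry-block PHI
| | // nodes, so the whole function is skipped.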
| 765 | |
| 766 | BasicBlock *OldEntry = nullptr; |
| 767 | bool TailCallsAreMarkedTail = false; |
| 768 | SmallVector<PHINode*, 8> ArgumentPHIs; |
| 769 | |
| 770 | // If false, we cannot perform TRE on tail calls marked with the 'tail' |
| 771 | // attribute, because doing so would cause the stack size to increase (a |
| 772 | // genuine tail call would deallocate variable-sized allocas; TRE does not). |
| 773 | bool CanTRETailMarkedCall = canTRE(F); |
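| | // Rough sketch (assumed C-like example) of the concern: in
| | //   void f(int N) { char *P = alloca(N); use(P); f(N - 1); /* tail */ }
| | // a genuine tail call would release the caller's frame, but rewriting the
| | // call into a loop back edge re-executes the variable-sized alloca on a
| | // stack that is never unwound.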
| 774 | |
| 775 | // Change any tail recursive calls to loops. |
| 776 | // |
| 777 | // FIXME: The code generator produces really bad code when an 'escaping |
| 778 | // alloca' is changed from being a static alloca to being a dynamic alloca. |
| 779 | // Until this is resolved, disable this transformation if that would ever |
| 780 | // happen. This bug is PR962. |
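| | // Illustrative (assumed) shape of the 'escaping alloca' case:
| | //   entry:
| | //     %buf = alloca i8, i32 64          ; static alloca, address escapes
| | //     call void @capture(i8* %buf)
| | // Once a new entry block is created for the loop, %buf no longer lives in
| | // the entry block and the code generator treats it as a dynamic alloca.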
| 781 | for (Function::iterator BBI = F.begin(), E = F.end(); BBI != E; /*in loop*/) { |
| 782 | BasicBlock *BB = &*BBI++; // foldReturnAndProcessPred may delete BB. |
| 783 | if (ReturnInst *Ret = dyn_cast<ReturnInst>(BB->getTerminator())) { |
| Davide Italiano | 4b8c8ea | 2017-07-19 21:13:22 +0000 | [diff] [blame] | 784 | bool Change = processReturningBlock(Ret, OldEntry, TailCallsAreMarkedTail, |
| 785 | ArgumentPHIs, !CanTRETailMarkedCall, |
| Chijun Sima | 8b5de48 | 2018-08-04 08:13:47 +0000 | [diff] [blame] | 786 | TTI, AA, ORE, DTU); |
| Davide Italiano | 8738363 | 2016-06-02 23:02:44 +0000 | [diff] [blame] | 787 | if (!Change && BB->getFirstNonPHIOrDbg() == Ret) |
| Chijun Sima | 8b5de48 | 2018-08-04 08:13:47 +0000 | [diff] [blame] | 788 | Change = foldReturnAndProcessPred( |
| 789 | BB, Ret, OldEntry, TailCallsAreMarkedTail, ArgumentPHIs, |
| 790 | !CanTRETailMarkedCall, TTI, AA, ORE, DTU); |
| Davide Italiano | 8738363 | 2016-06-02 23:02:44 +0000 | [diff] [blame] | 791 | MadeChange |= Change; |
| 792 | } |
| 793 | } |
| 794 | |
| 795 | // If we eliminated any tail recursions, it's possible that we inserted some |
| 796 | // silly PHI nodes which just merge an initial value (the incoming operand) |
| 797 | // with themselves. Check to see if we did and clean up our mess if so. This |
| 798 | // occurs when a function passes an argument straight through to its tail |
| 799 | // call. |
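| | // Illustrative IR sketch (block and value names assumed): such a PHI looks
| | // like
| | //   %n.tr = phi i32 [ %n, %entry ], [ %n.tr, %tailrecurse ]
| | // and SimplifyInstruction folds it to %n, so it can be erased below.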
| 800 | for (PHINode *PN : ArgumentPHIs) { |
| 801 | // If the PHI node simplifies to a single value, replace it with that value. |
| 802 | if (Value *PNV = SimplifyInstruction(PN, F.getParent()->getDataLayout())) { |
| 803 | PN->replaceAllUsesWith(PNV); |
| 804 | PN->eraseFromParent(); |
| 805 | } |
| 806 | } |
| 807 | |
| 808 | return MadeChange; |
| 809 | } |
| 810 | |
| 811 | namespace { |
| 812 | struct TailCallElim : public FunctionPass { |
| 813 | static char ID; // Pass identification, replacement for typeid |
| 814 | TailCallElim() : FunctionPass(ID) { |
| 815 | initializeTailCallElimPass(*PassRegistry::getPassRegistry()); |
| 816 | } |
| 817 | |
| 818 | void getAnalysisUsage(AnalysisUsage &AU) const override { |
| 819 | AU.addRequired<TargetTransformInfoWrapperPass>(); |
| Xin Tong | b412831 | 2017-06-19 15:21:18 +0000 | [diff] [blame] | 820 | AU.addRequired<AAResultsWrapperPass>(); |
| Davide Italiano | 4b8c8ea | 2017-07-19 21:13:22 +0000 | [diff] [blame] | 821 | AU.addRequired<OptimizationRemarkEmitterWrapperPass>(); |
| Davide Italiano | 8738363 | 2016-06-02 23:02:44 +0000 | [diff] [blame] | 822 | AU.addPreserved<GlobalsAAWrapperPass>(); |
| Chijun Sima | 8b5de48 | 2018-08-04 08:13:47 +0000 | [diff] [blame] | 823 | AU.addPreserved<DominatorTreeWrapperPass>(); |
| 824 | AU.addPreserved<PostDominatorTreeWrapperPass>(); |
| Davide Italiano | 8738363 | 2016-06-02 23:02:44 +0000 | [diff] [blame] | 825 | } |
| 826 | |
| 827 | bool runOnFunction(Function &F) override { |
| Sean Silva | 59fe82f | 2016-07-06 23:48:41 +0000 | [diff] [blame] | 828 | if (skipFunction(F)) |
| Davide Italiano | 8738363 | 2016-06-02 23:02:44 +0000 | [diff] [blame] | 829 | return false; |
| 830 | |
| Chijun Sima | 8b5de48 | 2018-08-04 08:13:47 +0000 | [diff] [blame] | 831 | auto *DTWP = getAnalysisIfAvailable<DominatorTreeWrapperPass>(); |
| 832 | auto *DT = DTWP ? &DTWP->getDomTree() : nullptr; |
| 833 | auto *PDTWP = getAnalysisIfAvailable<PostDominatorTreeWrapperPass>(); |
| 834 | auto *PDT = PDTWP ? &PDTWP->getPostDomTree() : nullptr; |
| 835 | // There is no noticeable performance difference here between Lazy and Eager |
| 836 | // UpdateStrategy based on some test results. It is feasible to switch the |
| 837 | // UpdateStrategy to Lazy if we find it profitable later. |
| 838 | DomTreeUpdater DTU(DT, PDT, DomTreeUpdater::UpdateStrategy::Eager); |
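| | // With the Eager strategy, each CFG update (e.g. the DTU.deleteBB call in
| | // foldReturnAndProcessPred) is applied to whichever trees are available
| | // right away instead of being queued.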
| 839 | |
| Davide Italiano | 8738363 | 2016-06-02 23:02:44 +0000 | [diff] [blame] | 840 | return eliminateTailRecursion( |
| Xin Tong | b412831 | 2017-06-19 15:21:18 +0000 | [diff] [blame] | 841 | F, &getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F), |
| Davide Italiano | 4b8c8ea | 2017-07-19 21:13:22 +0000 | [diff] [blame] | 842 | &getAnalysis<AAResultsWrapperPass>().getAAResults(), |
| Chijun Sima | 8b5de48 | 2018-08-04 08:13:47 +0000 | [diff] [blame] | 843 | &getAnalysis<OptimizationRemarkEmitterWrapperPass>().getORE(), DTU); |
| Davide Italiano | 8738363 | 2016-06-02 23:02:44 +0000 | [diff] [blame] | 844 | } |
| 845 | }; |
| 846 | } // namespace |
| 847 | |
| 848 | char TailCallElim::ID = 0; |
| 849 | INITIALIZE_PASS_BEGIN(TailCallElim, "tailcallelim", "Tail Call Elimination", |
| 850 | false, false) |
| 851 | INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass) |
| Davide Italiano | 4b8c8ea | 2017-07-19 21:13:22 +0000 | [diff] [blame] | 852 | INITIALIZE_PASS_DEPENDENCY(OptimizationRemarkEmitterWrapperPass) |
| Davide Italiano | 8738363 | 2016-06-02 23:02:44 +0000 | [diff] [blame] | 853 | INITIALIZE_PASS_END(TailCallElim, "tailcallelim", "Tail Call Elimination", |
| 854 | false, false) |
| 855 | |
| 856 | // Public interface to the TailCallElimination pass |
| 857 | FunctionPass *llvm::createTailCallEliminationPass() { |
| 858 | return new TailCallElim(); |
| 859 | } |
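| | // Usage sketch (illustrative only, legacy pass manager):
| | //   legacy::PassManager PM;
| | //   PM.add(createTailCallEliminationPass());
| | //   PM.run(M);
| | // or, from the command line: opt -tailcallelim input.ll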
| Sean Silva | 59fe82f | 2016-07-06 23:48:41 +0000 | [diff] [blame] | 860 | |
| 861 | PreservedAnalyses TailCallElimPass::run(Function &F, |
| 862 | FunctionAnalysisManager &AM) { |
| 864 | TargetTransformInfo &TTI = AM.getResult<TargetIRAnalysis>(F); |
| Xin Tong | b412831 | 2017-06-19 15:21:18 +0000 | [diff] [blame] | 865 | AliasAnalysis &AA = AM.getResult<AAManager>(F); |
| Davide Italiano | 4b8c8ea | 2017-07-19 21:13:22 +0000 | [diff] [blame] | 866 | auto &ORE = AM.getResult<OptimizationRemarkEmitterAnalysis>(F); |
| Chijun Sima | 8b5de48 | 2018-08-04 08:13:47 +0000 | [diff] [blame] | 867 | auto *DT = AM.getCachedResult<DominatorTreeAnalysis>(F); |
| 868 | auto *PDT = AM.getCachedResult<PostDominatorTreeAnalysis>(F); |
| 869 | // There is no noticeable performance difference here between Lazy and Eager |
| 870 | // UpdateStrategy based on some test results. It is feasible to switch the |
| 871 | // UpdateStrategy to Lazy if we find it profitable later. |
| 872 | DomTreeUpdater DTU(DT, PDT, DomTreeUpdater::UpdateStrategy::Eager); |
| 873 | bool Changed = eliminateTailRecursion(F, &TTI, &AA, &ORE, DTU); |
| Sean Silva | 59fe82f | 2016-07-06 23:48:41 +0000 | [diff] [blame] | 874 | |
| 875 | if (!Changed) |
| 876 | return PreservedAnalyses::all(); |
| 877 | PreservedAnalyses PA; |
| 878 | PA.preserve<GlobalsAA>(); |
| Chijun Sima | 8b5de48 | 2018-08-04 08:13:47 +0000 | [diff] [blame] | 879 | PA.preserve<DominatorTreeAnalysis>(); |
| 880 | PA.preserve<PostDominatorTreeAnalysis>(); |
| Sean Silva | 59fe82f | 2016-07-06 23:48:41 +0000 | [diff] [blame] | 881 | return PA; |
| 882 | } |
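| | // Usage sketch (illustrative only, new pass manager):
| | //   FunctionPassManager FPM;
| | //   FPM.addPass(TailCallElimPass());
| | // or, from the command line: opt -passes=tailcallelim input.ll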