//===- InlineFunction.cpp - Code to perform function inlining -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file was developed by the LLVM research group and is distributed under
// the University of Illinois Open Source License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements inlining of a function into a call site, resolving
// parameters and the return value as appropriate.
//
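// A minimal, illustrative sketch (not code from this file) of how a caller
// might drive this utility, assuming it already has some instruction 'I' in
// hand that it suspects is an inlinable call:
//
//   if (CallInst *CI = dyn_cast<CallInst>(&I))
//     if (Function *Callee = CI->getCalledFunction())
//       if (!Callee->isExternal())   // callee must have a body to clone
//         InlineFunction(CI);        // returns false if inlining is impossible
//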
13// FIXME: This pass should transform alloca instructions in the called function
Chris Lattner9fc977e2004-02-04 01:41:09 +000014// into alloca/dealloca pairs! Or perhaps it should refuse to inline them!
Chris Lattner530d4bf2003-05-29 15:11:31 +000015//
16//===----------------------------------------------------------------------===//
17
18#include "llvm/Transforms/Utils/Cloning.h"
Chris Lattner0cc265e2003-08-24 06:59:16 +000019#include "llvm/Constant.h"
Chris Lattnerfc3fe5c2003-08-24 04:06:56 +000020#include "llvm/DerivedTypes.h"
Chris Lattner530d4bf2003-05-29 15:11:31 +000021#include "llvm/Module.h"
Chris Lattner0cc265e2003-08-24 06:59:16 +000022#include "llvm/Instructions.h"
23#include "llvm/Intrinsics.h"
24#include "llvm/Support/CallSite.h"
Chris Lattnerfc3fe5c2003-08-24 04:06:56 +000025#include "llvm/Transforms/Utils/Local.h"
Chris Lattnerdf3c3422004-01-09 06:12:26 +000026using namespace llvm;
Chris Lattner530d4bf2003-05-29 15:11:31 +000027
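// The two overloads below are thin convenience wrappers: each simply wraps its
// instruction in a CallSite and defers to the CallSite-based implementation
// further down in this file.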
bool llvm::InlineFunction(CallInst *CI) { return InlineFunction(CallSite(CI)); }
bool llvm::InlineFunction(InvokeInst *II) {return InlineFunction(CallSite(II));}

// InlineFunction - This function inlines the called function into the basic
// block of the caller.  This returns false if it is not possible to inline
// this call.  The program is still in a well-defined state if this occurs,
// though.
//
// Note that this only does one level of inlining.  For example, if the
// instruction 'call B' is inlined, and 'B' calls 'C', then the call to 'C' now
// exists in the instruction stream.  Similarly, this will inline a recursive
// function by one level.
//
bool llvm::InlineFunction(CallSite CS) {
  Instruction *TheCall = CS.getInstruction();
  assert(TheCall->getParent() && TheCall->getParent()->getParent() &&
         "Instruction not in function!");

  const Function *CalledFunc = CS.getCalledFunction();
  if (CalledFunc == 0 ||          // Can't inline external function or indirect
      CalledFunc->isExternal() || // call, or call to a vararg function!
      CalledFunc->getFunctionType()->isVarArg()) return false;

  BasicBlock *OrigBB = TheCall->getParent();
  Function *Caller = OrigBB->getParent();

  // Get an iterator to the last basic block in the function, which will have
  // the new function inlined after it.
  //
  Function::iterator LastBlock = &Caller->back();

  // Make sure to capture all of the return instructions from the cloned
  // function.
  std::vector<ReturnInst*> Returns;
  { // Scope to destroy ValueMap after cloning.
    // Calculate the vector of arguments to pass into the function cloner...
    std::map<const Value*, Value*> ValueMap;
    assert(std::distance(CalledFunc->abegin(), CalledFunc->aend()) ==
           std::distance(CS.arg_begin(), CS.arg_end()) &&
           "No varargs calls can be inlined!");

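    // Map each formal argument of the callee to the corresponding actual
    // argument at the call site; the cloner consults this table to rewrite
    // uses of the formals in the cloned body.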
    CallSite::arg_iterator AI = CS.arg_begin();
    for (Function::const_aiterator I = CalledFunc->abegin(),
           E = CalledFunc->aend(); I != E; ++I, ++AI)
      ValueMap[I] = *AI;

    // Clone the entire body of the callee into the caller.
    CloneFunctionInto(Caller, CalledFunc, ValueMap, Returns, ".i");
  }

  // Remember the first block that is newly cloned over.
  Function::iterator FirstNewBlock = LastBlock; ++FirstNewBlock;

  // If there are any alloca instructions in the block that used to be the entry
  // block for the callee, move them to the entry block of the caller.  First
  // calculate which instruction they should be inserted before.  We insert the
  // instructions at the end of the current alloca list.
  //
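  // Note that only allocas whose array size is a Constant get hoisted; a
  // dynamically sized alloca could depend on values computed by the inlined
  // code, so it is left where it is.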
  if (isa<AllocaInst>(FirstNewBlock->begin())) {
    BasicBlock::iterator InsertPoint = Caller->begin()->begin();
    for (BasicBlock::iterator I = FirstNewBlock->begin(),
           E = FirstNewBlock->end(); I != E; )
      if (AllocaInst *AI = dyn_cast<AllocaInst>(I++))
        if (isa<Constant>(AI->getArraySize())) {
          // Scan for the block of allocas that we can move over.
          while (isa<AllocaInst>(I) &&
                 isa<Constant>(cast<AllocaInst>(I)->getArraySize()))
            ++I;

          // Transfer all of the allocas over in a block.  Using splice means
          // that the instructions aren't removed from the symbol table and
          // then reinserted.
          Caller->front().getInstList().splice(InsertPoint,
                                               FirstNewBlock->getInstList(),
                                               AI, I);
        }
  }

  // If we are inlining for an invoke instruction, we must make sure to rewrite
  // any inlined 'unwind' instructions into branches to the invoke exception
  // destination, and call instructions into invoke instructions.
  if (InvokeInst *II = dyn_cast<InvokeInst>(TheCall)) {
    BasicBlock *InvokeDest = II->getUnwindDest();
    std::vector<Value*> InvokeDestPHIValues;

    // If there are PHI nodes in the exceptional destination block, we need to
    // keep track of which values came into them from this invoke, then remove
    // the entry for this block.
    for (BasicBlock::iterator I = InvokeDest->begin();
         PHINode *PN = dyn_cast<PHINode>(I); ++I)
      // Save the value to use for this edge...
      InvokeDestPHIValues.push_back(PN->getIncomingValueForBlock(OrigBB));

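    // Walk every block cloned in from the callee, rewriting ordinary calls
    // into invokes that unwind to InvokeDest, and rewriting 'unwind'
    // terminators into direct branches to InvokeDest.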
    for (Function::iterator BB = FirstNewBlock, E = Caller->end();
         BB != E; ++BB) {
      for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ) {
        // We only need to check for function calls: inlined invoke instructions
        // require no special handling...
        if (CallInst *CI = dyn_cast<CallInst>(I)) {
          // Convert this function call into an invoke instruction... if it's
          // not an intrinsic function call (which are known to not throw).
          if (CI->getCalledFunction() &&
              CI->getCalledFunction()->getIntrinsicID()) {
            ++I;
          } else {
            // First, split the basic block...
            BasicBlock *Split = BB->splitBasicBlock(CI, CI->getName()+".noexc");

            // Next, create the new invoke instruction, inserting it at the end
            // of the old basic block.
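            // Operand 0 of the call is the callee itself; the remaining
            // operands are the actual arguments, which are forwarded to the
            // new invoke unchanged.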
            InvokeInst *II =
              new InvokeInst(CI->getCalledValue(), Split, InvokeDest,
                             std::vector<Value*>(CI->op_begin()+1, CI->op_end()),
                             CI->getName(), BB->getTerminator());

            // Make sure that anything using the call now uses the invoke!
            CI->replaceAllUsesWith(II);

            // Delete the unconditional branch inserted by splitBasicBlock
            BB->getInstList().pop_back();
            Split->getInstList().pop_front();  // Delete the original call

            // Update any PHI nodes in the exceptional block to indicate that
            // there is now a new entry in them.
            unsigned i = 0;
            for (BasicBlock::iterator I = InvokeDest->begin();
                 PHINode *PN = dyn_cast<PHINode>(I); ++I, ++i)
              PN->addIncoming(InvokeDestPHIValues[i], BB);

            // This basic block is now complete, start scanning the next one.
            break;
          }
        } else {
          ++I;
        }
      }

      if (UnwindInst *UI = dyn_cast<UnwindInst>(BB->getTerminator())) {
        // An UnwindInst requires special handling when it gets inlined into an
        // invoke site.  Once this happens, we know that the unwind would cause
        // a control transfer to the invoke exception destination, so we can
        // transform it into a direct branch to the exception destination.
        new BranchInst(InvokeDest, UI);

        // Delete the unwind instruction!
        UI->getParent()->getInstList().pop_back();

        // Update any PHI nodes in the exceptional block to indicate that
        // there is now a new entry in them.
        unsigned i = 0;
        for (BasicBlock::iterator I = InvokeDest->begin();
             PHINode *PN = dyn_cast<PHINode>(I); ++I, ++i)
          PN->addIncoming(InvokeDestPHIValues[i], BB);
      }
    }

    // Now that everything is happy, we have one final detail.  The PHI nodes in
    // the exception destination block still have entries due to the original
    // invoke instruction.  Eliminate these entries (which might even delete the
    // PHI node) now.
    InvokeDest->removePredecessor(II->getParent());
  }

  // If we cloned in _exactly one_ basic block, and if that block ends in a
  // return instruction, we splice the body of the inlined callee directly into
  // the calling basic block.
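  // (Checking std::distance(FirstNewBlock, Caller->end()) == 1 below is how we
  // verify that exactly one new basic block was cloned in.)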
  if (Returns.size() == 1 && std::distance(FirstNewBlock, Caller->end()) == 1) {
    // Move all of the instructions right before the call.
    OrigBB->getInstList().splice(TheCall, FirstNewBlock->getInstList(),
                                 FirstNewBlock->begin(), FirstNewBlock->end());
    // Remove the cloned basic block.
    Caller->getBasicBlockList().pop_back();

    // If the call site was an invoke instruction, add a branch to the normal
    // destination.
    if (InvokeInst *II = dyn_cast<InvokeInst>(TheCall))
      new BranchInst(II->getNormalDest(), TheCall);

    // If the return instruction returned a value, replace uses of the call with
    // uses of the returned value.
    if (!TheCall->use_empty())
      TheCall->replaceAllUsesWith(Returns[0]->getReturnValue());

    // Since we are now done with the Call/Invoke, we can delete it.
    TheCall->getParent()->getInstList().erase(TheCall);

    // Since we are now done with the return instruction, delete it also.
    Returns[0]->getParent()->getInstList().erase(Returns[0]);

    // We are now done with the inlining.
    return true;
  }

  // Otherwise, we have the normal case of more than one block to inline or
  // multiple return sites.

  // We need to stitch the inlined blocks into the hole between the "starter"
  // and "ender" blocks.  How we split the original block to make that hole
  // depends on whether this is an invoke instruction or a call instruction.
  BasicBlock *AfterCallBB;
  if (InvokeInst *II = dyn_cast<InvokeInst>(TheCall)) {

    // Add an unconditional branch to make this look like the CallInst case...
    BranchInst *NewBr = new BranchInst(II->getNormalDest(), TheCall);

    // Split the basic block.  This guarantees that no PHI nodes will have to be
    // updated due to new incoming edges, and makes the invoke case more
    // symmetric to the call case.
    AfterCallBB = OrigBB->splitBasicBlock(NewBr,
                                          CalledFunc->getName()+".entry");

  } else {  // It's a call
    // If this is a call instruction, we need to split the basic block that
    // the call lives in.
    //
    AfterCallBB = OrigBB->splitBasicBlock(TheCall,
                                          CalledFunc->getName()+".entry");
  }

  // Change the branch that used to go to AfterCallBB to branch to the first
  // basic block of the inlined function.
  //
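  // (setOperand(0) suffices because an unconditional branch has a single
  // operand: its successor block.)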
  TerminatorInst *Br = OrigBB->getTerminator();
  assert(Br && Br->getOpcode() == Instruction::Br &&
         "splitBasicBlock broken!");
  Br->setOperand(0, FirstNewBlock);
253
254
255 // Now that the function is correct, make it a little bit nicer. In
256 // particular, move the basic blocks inserted from the end of the function
257 // into the space made by splitting the source basic block.
258 //
259 Caller->getBasicBlockList().splice(AfterCallBB, Caller->getBasicBlockList(),
260 FirstNewBlock, Caller->end());
261
Chris Lattner18ef3fd2004-02-04 02:51:48 +0000262 // Handle all of the return instructions that we just cloned in, and eliminate
263 // any users of the original call/invoke instruction.
264 if (Returns.size() > 1) {
265 // The PHI node should go at the front of the new basic block to merge all
266 // possible incoming values.
267 //
268 PHINode *PHI = 0;
269 if (!TheCall->use_empty()) {
270 PHI = new PHINode(CalledFunc->getReturnType(),
271 TheCall->getName(), AfterCallBB->begin());
Chris Lattner0fa8c7c2004-02-04 04:17:06 +0000272
Chris Lattner18ef3fd2004-02-04 02:51:48 +0000273 // Anything that used the result of the function call should now use the
274 // PHI node as their operand.
275 //
276 TheCall->replaceAllUsesWith(PHI);
277 }
Chris Lattner0fa8c7c2004-02-04 04:17:06 +0000278
Chris Lattner18ef3fd2004-02-04 02:51:48 +0000279 // Loop over all of the return instructions, turning them into unconditional
280 // branches to the merge point now, and adding entries to the PHI node as
281 // appropriate.
282 for (unsigned i = 0, e = Returns.size(); i != e; ++i) {
283 ReturnInst *RI = Returns[i];
Chris Lattner0fa8c7c2004-02-04 04:17:06 +0000284
Chris Lattner18ef3fd2004-02-04 02:51:48 +0000285 if (PHI) {
286 assert(RI->getReturnValue() && "Ret should have value!");
287 assert(RI->getReturnValue()->getType() == PHI->getType() &&
288 "Ret value not consistent in function!");
289 PHI->addIncoming(RI->getReturnValue(), RI->getParent());
290 }
Chris Lattner0fa8c7c2004-02-04 04:17:06 +0000291
Chris Lattner18ef3fd2004-02-04 02:51:48 +0000292 // Add a branch to the merge point where the PHI node lives if it exists.
293 new BranchInst(AfterCallBB, RI);
Chris Lattner0fa8c7c2004-02-04 04:17:06 +0000294
Chris Lattner18ef3fd2004-02-04 02:51:48 +0000295 // Delete the return instruction now
296 RI->getParent()->getInstList().erase(RI);
297 }
Chris Lattner0fa8c7c2004-02-04 04:17:06 +0000298
  } else if (!Returns.empty()) {
    // Otherwise, if there is exactly one return instruction, just replace
    // anything using the return value of the call with the computed value.
    if (!TheCall->use_empty())
      TheCall->replaceAllUsesWith(Returns[0]->getReturnValue());

    // Add a branch from the lone return block to the merge point.
    new BranchInst(AfterCallBB, Returns[0]);

    // Delete the return instruction now
    Returns[0]->getParent()->getInstList().erase(Returns[0]);
  }

  // Since we are now done with the Call/Invoke, we can delete it.
  TheCall->getParent()->getInstList().erase(TheCall);

  // We should always be able to fold the entry block of the function into the
  // single predecessor of the block...
  assert(cast<BranchInst>(Br)->isUnconditional() && "splitBasicBlock broken!");
  BasicBlock *CalleeEntry = cast<BranchInst>(Br)->getSuccessor(0);
  SimplifyCFG(CalleeEntry);

  // Okay, continue the CFG cleanup.  It's often the case that there is only a
  // single return instruction in the callee function.  If this is the case,
  // then we have an unconditional branch from the return block to the
  // 'AfterCallBB'.  Check for this case, and eliminate the branch if possible.
  SimplifyCFG(AfterCallBB);

  return true;
}