//===- SjLjEHPass.cpp - Eliminate Invoke & Unwind instructions -----------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This transformation is designed for use by code generators which use SjLj
// based exception handling.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "sjljehprepare"
#include "llvm/Transforms/Scalar.h"
#include "llvm/Constants.h"
#include "llvm/DerivedTypes.h"
#include "llvm/Instructions.h"
#include "llvm/Intrinsics.h"
#include "llvm/LLVMContext.h"
#include "llvm/Module.h"
#include "llvm/Pass.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetLowering.h"
using namespace llvm;

STATISTIC(NumInvokes, "Number of invokes replaced");
STATISTIC(NumUnwinds, "Number of unwinds replaced");
STATISTIC(NumSpilled, "Number of registers live across unwind edges");

namespace {
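  /// SjLjEHPass - Lower invoke, unwind, and the EH intrinsics into the
  /// explicit bookkeeping that setjmp/longjmp-based exception handling
  /// requires: a per-function context registered with the unwind runtime,
  /// recorded call-site numbers, and a dispatch switch to the landing pads.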
  class VISIBILITY_HIDDEN SjLjEHPass : public FunctionPass {

    const TargetLowering *TLI;

    const Type *FunctionContextTy; // Type of the per-function SjLj context.
    Constant *RegisterFn;          // _Unwind_SjLj_Register
    Constant *UnregisterFn;        // _Unwind_SjLj_Unregister
    Constant *ResumeFn;            // _Unwind_SjLj_Resume
    Constant *BuiltinSetjmpFn;     // llvm.eh.sjlj.setjmp
    Constant *FrameAddrFn;         // llvm.frameaddress
    Constant *LSDAAddrFn;          // llvm.eh.sjlj.lsda
    Value *PersonalityFn;          // Personality from the first eh.selector.
    Constant *Selector32Fn;        // llvm.eh.selector.i32
    Constant *Selector64Fn;        // llvm.eh.selector.i64
    Constant *ExceptionFn;         // llvm.eh.exception

    Value *CallSite;               // GEP of the call_site context field.
  public:
    static char ID; // Pass identification, replacement for typeid
    explicit SjLjEHPass(const TargetLowering *tli = NULL)
      : FunctionPass(&ID), TLI(tli) { }
    bool doInitialization(Module &M);
    bool runOnFunction(Function &F);

    virtual void getAnalysisUsage(AnalysisUsage &AU) const { }
    const char *getPassName() const {
      return "SJLJ Exception Handling preparation";
    }

  private:
    void markInvokeCallSite(InvokeInst *II, unsigned InvokeNo,
                            Value *CallSite);
    void splitLiveRangesLiveAcrossInvokes(SmallVector<InvokeInst*,16> &Invokes);
    bool insertSjLjEHSupport(Function &F);
  };
} // end anonymous namespace

char SjLjEHPass::ID = 0;

// Public Interface To the SjLjEHPass pass.
FunctionPass *llvm::createSjLjEHPass(const TargetLowering *TLI) {
  return new SjLjEHPass(TLI);
}
// doInitialization - Set up the per-module state the transformation needs:
// the function context type and declarations of the SjLj runtime functions
// and EH intrinsics.
bool SjLjEHPass::doInitialization(Module &M) {
  // Build the function context structure.
  // builtin_setjmp uses a five word jbuf
  const Type *VoidPtrTy =
          PointerType::getUnqual(Type::getInt8Ty(M.getContext()));
  const Type *Int32Ty = Type::getInt32Ty(M.getContext());
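  // For reference, this corresponds roughly to the C struct the SjLj unwind
  // runtime expects (the field names here are illustrative only):
  //   struct SjLjFunctionContext {
  //     void *prev;         // __prev: previous context on the unwind stack
  //     int   call_site;    // call_site: index of the active call site
  //     int   data[4];      // __data: exception value and selector land here
  //     void *personality;  // __personality: personality routine
  //     void *lsda;         // __lsda: language-specific data area
  //     void *jbuf[5];      // __jbuf: builtin_setjmp jump buffer
  //   };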
  FunctionContextTy =
    StructType::get(M.getContext(),
                    VoidPtrTy,                        // __prev
                    Int32Ty,                          // call_site
                    ArrayType::get(Int32Ty, 4),       // __data
                    VoidPtrTy,                        // __personality
                    VoidPtrTy,                        // __lsda
                    ArrayType::get(VoidPtrTy, 5),     // __jbuf
                    NULL);
  RegisterFn = M.getOrInsertFunction("_Unwind_SjLj_Register",
                                     Type::getVoidTy(M.getContext()),
                                     PointerType::getUnqual(FunctionContextTy),
                                     (Type *)0);
  UnregisterFn =
    M.getOrInsertFunction("_Unwind_SjLj_Unregister",
                          Type::getVoidTy(M.getContext()),
                          PointerType::getUnqual(FunctionContextTy),
                          (Type *)0);
  ResumeFn =
    M.getOrInsertFunction("_Unwind_SjLj_Resume",
                          Type::getVoidTy(M.getContext()),
                          VoidPtrTy,
                          (Type *)0);
  FrameAddrFn = Intrinsic::getDeclaration(&M, Intrinsic::frameaddress);
  BuiltinSetjmpFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_setjmp);
  LSDAAddrFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_sjlj_lsda);
  Selector32Fn = Intrinsic::getDeclaration(&M, Intrinsic::eh_selector_i32);
  Selector64Fn = Intrinsic::getDeclaration(&M, Intrinsic::eh_selector_i64);
  ExceptionFn = Intrinsic::getDeclaration(&M, Intrinsic::eh_exception);

  return true;
}

/// markInvokeCallSite - Insert code to mark the call_site for this invoke.
void SjLjEHPass::markInvokeCallSite(InvokeInst *II, unsigned InvokeNo,
                                    Value *CallSite) {
  ConstantInt *CallSiteNoC =
    ConstantInt::get(Type::getInt32Ty(II->getContext()), InvokeNo);

  // If the unwind edge has phi nodes, split the edge.
  if (isa<PHINode>(II->getUnwindDest()->begin())) {
    SplitCriticalEdge(II, 1, this);

    // If there are any phi nodes left, they must have a single predecessor.
    while (PHINode *PN = dyn_cast<PHINode>(II->getUnwindDest()->begin())) {
      PN->replaceAllUsesWith(PN->getIncomingValue(0));
      PN->eraseFromParent();
    }
  }

  // Insert a store of the call-site number before the invoke.
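  // The store is volatile so it is not deleted or reordered; the dispatch
  // code created in insertSjLjEHSupport loads this slot back out of the
  // function context when an exception unwinds into this frame.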
  new StoreInst(CallSiteNoC, CallSite, true, II);  // volatile

  // We still want this to look like an invoke so we emit the LSDA properly
  // FIXME: ??? Or will this cause strangeness with mis-matched IDs like
  // when it was in the front end?
}

/// MarkBlocksLiveIn - Insert BB and all of its predecessors into LiveBBs until
/// we reach blocks we've already seen.
static void MarkBlocksLiveIn(BasicBlock *BB, std::set<BasicBlock*> &LiveBBs) {
  if (!LiveBBs.insert(BB).second) return; // already been here.

  for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI)
    MarkBlocksLiveIn(*PI, LiveBBs);
}

/// splitLiveRangesLiveAcrossInvokes - Spill each value that is live across an
/// unwind edge into a stack slot, guaranteeing that there is nothing live
/// across the unwind edge. This process also splits all critical edges
/// coming out of invokes.
void SjLjEHPass::
splitLiveRangesLiveAcrossInvokes(SmallVector<InvokeInst*,16> &Invokes) {
  // First step, split all critical edges from invoke instructions.
  for (unsigned i = 0, e = Invokes.size(); i != e; ++i) {
    InvokeInst *II = Invokes[i];
    SplitCriticalEdge(II, 0, this);
    SplitCriticalEdge(II, 1, this);
    assert(!isa<PHINode>(II->getNormalDest()->begin()) &&
           !isa<PHINode>(II->getUnwindDest()->begin()) &&
           "critical edge splitting left single entry phi nodes?");
  }

  Function *F = Invokes.back()->getParent()->getParent();

  // To avoid having to handle incoming arguments specially, we lower each arg
  // to a copy instruction in the entry block. This ensures that the argument
  // value itself cannot be live across the entry block.
  BasicBlock::iterator AfterAllocaInsertPt = F->begin()->begin();
  while (isa<AllocaInst>(AfterAllocaInsertPt) &&
         isa<ConstantInt>(cast<AllocaInst>(AfterAllocaInsertPt)->getArraySize()))
    ++AfterAllocaInsertPt;
  for (Function::arg_iterator AI = F->arg_begin(), E = F->arg_end();
       AI != E; ++AI) {
    // This is always a no-op cast because we're casting AI to AI->getType(),
    // so the source and destination types are identical. BitCast is the only
    // possibility.
    CastInst *NC = new BitCastInst(
      AI, AI->getType(), AI->getName()+".tmp", AfterAllocaInsertPt);
    AI->replaceAllUsesWith(NC);
    // Normally it is forbidden to replace a CastInst's operand because it
    // could cause the opcode to reflect an illegal conversion. However, we're
    // replacing it here with the same value it was constructed with, simply to
    // make NC its user.
    NC->setOperand(0, AI);
  }

  // Finally, scan the code looking for instructions with bad live ranges.
  for (Function::iterator BB = F->begin(), E = F->end(); BB != E; ++BB)
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E; ++II) {
      // Ignore obvious cases we don't have to handle. In particular, most
      // instructions either have no uses or only have a single use inside the
      // current block. Ignore them quickly.
      Instruction *Inst = II;
      if (Inst->use_empty()) continue;
      if (Inst->hasOneUse() &&
          cast<Instruction>(Inst->use_back())->getParent() == BB &&
          !isa<PHINode>(Inst->use_back())) continue;

      // If this is an alloca in the entry block, it's not a real register
      // value.
      if (AllocaInst *AI = dyn_cast<AllocaInst>(Inst))
        if (isa<ConstantInt>(AI->getArraySize()) && BB == F->begin())
          continue;

      // Avoid iterator invalidation by copying users to a temporary vector.
      SmallVector<Instruction*,16> Users;
      for (Value::use_iterator UI = Inst->use_begin(), E = Inst->use_end();
           UI != E; ++UI) {
        Instruction *User = cast<Instruction>(*UI);
        if (User->getParent() != BB || isa<PHINode>(User))
          Users.push_back(User);
      }

      // Find all of the blocks that this value is live in.
      std::set<BasicBlock*> LiveBBs;
      LiveBBs.insert(Inst->getParent());
      while (!Users.empty()) {
        Instruction *U = Users.back();
        Users.pop_back();

        if (!isa<PHINode>(U)) {
          MarkBlocksLiveIn(U->getParent(), LiveBBs);
        } else {
          // Uses for a PHI node occur in their predecessor block.
          PHINode *PN = cast<PHINode>(U);
          for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i)
            if (PN->getIncomingValue(i) == Inst)
              MarkBlocksLiveIn(PN->getIncomingBlock(i), LiveBBs);
        }
      }

      // Now that we know all of the blocks that this thing is live in, see if
      // it includes any of the unwind locations.
      bool NeedsSpill = false;
      for (unsigned i = 0, e = Invokes.size(); i != e; ++i) {
        BasicBlock *UnwindBlock = Invokes[i]->getUnwindDest();
        if (UnwindBlock != BB && LiveBBs.count(UnwindBlock)) {
          NeedsSpill = true;
        }
      }

      // If we decided we need a spill, do it.
      if (NeedsSpill) {
        ++NumSpilled;
        DemoteRegToStack(*Inst, true);
      }
    }
}

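/// insertSjLjEHSupport - Perform the actual lowering for one function. In
/// outline: collect the returns, unwinds and invokes; allocate the function
/// context on the stack; redirect eh.exception/eh.selector uses to the
/// context's data fields; build a dispatch block that switches on the
/// recorded call_site value and branches to the matching landing pad; call
/// eh.sjlj.setjmp in the entry block and branch to the dispatcher on a
/// non-zero return; register the context with the runtime; number the invoke
/// call sites; spill values live across unwind edges; and unregister the
/// context on every return.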
bool SjLjEHPass::insertSjLjEHSupport(Function &F) {
  SmallVector<ReturnInst*,16> Returns;
  SmallVector<UnwindInst*,16> Unwinds;
  SmallVector<InvokeInst*,16> Invokes;

  // Look through the terminators of the basic blocks to find invokes, returns
  // and unwinds
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB)
    if (ReturnInst *RI = dyn_cast<ReturnInst>(BB->getTerminator())) {
      // Remember all return instructions in case we insert an invoke into this
      // function.
      Returns.push_back(RI);
    } else if (InvokeInst *II = dyn_cast<InvokeInst>(BB->getTerminator())) {
      Invokes.push_back(II);
    } else if (UnwindInst *UI = dyn_cast<UnwindInst>(BB->getTerminator())) {
      Unwinds.push_back(UI);
    }

  // If we don't have any invokes or unwinds, there's nothing to do.
  if (Unwinds.empty() && Invokes.empty()) return false;

  NumInvokes += Invokes.size();
  NumUnwinds += Unwinds.size();

  if (!Invokes.empty()) {
    // We have invokes, so we need to add register/unregister calls to get
    // this function onto the global unwind stack.

    BasicBlock *EntryBB = F.begin();
    // Create an alloca for the incoming jump buffer ptr and the new jump
    // buffer that needs to be restored on all exits from the function. This
    // is an alloca because the value needs to be added to the global context
    // list.
    unsigned Align = 4; // FIXME: Should be a TLI check?
    AllocaInst *FunctionContext =
      new AllocaInst(FunctionContextTy, 0, Align,
                     "fcn_context", F.begin()->begin());

    Value *Idxs[2];
    const Type *Int32Ty = Type::getInt32Ty(F.getContext());
    Value *Zero = ConstantInt::get(Int32Ty, 0);
    // We need to also keep around a reference to the call_site field
    Idxs[0] = Zero;
    Idxs[1] = ConstantInt::get(Int32Ty, 1);
    CallSite = GetElementPtrInst::Create(FunctionContext, Idxs, Idxs+2,
                                         "call_site",
                                         EntryBB->getTerminator());
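    // Note that CallSite is the member declared above; it is also passed
    // explicitly to markInvokeCallSite when the invokes are numbered below.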

    // The exception selector comes back in context->data[1]
    Idxs[1] = ConstantInt::get(Int32Ty, 2);
    Value *FCData = GetElementPtrInst::Create(FunctionContext, Idxs, Idxs+2,
                                              "fc_data",
                                              EntryBB->getTerminator());
    Idxs[1] = ConstantInt::get(Int32Ty, 1);
    Value *SelectorAddr = GetElementPtrInst::Create(FCData, Idxs, Idxs+2,
                                                    "exc_selector_gep",
                                                    EntryBB->getTerminator());
    // The exception value comes back in context->data[0]
    Idxs[1] = Zero;
    Value *ExceptionAddr = GetElementPtrInst::Create(FCData, Idxs, Idxs+2,
                                                     "exception_gep",
                                                     EntryBB->getTerminator());

    // Find the eh.selector.* and eh.exception calls. We'll use the first
    // eh.selector to determine the right personality function to use. For
    // SJLJ, we always use the same personality for the whole function,
    // not on a per-selector basis.
    // FIXME: That's a bit ugly. Better way?
    SmallVector<CallInst*,16> EH_Selectors;
    SmallVector<CallInst*,16> EH_Exceptions;
    for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB) {
      for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) {
        if (CallInst *CI = dyn_cast<CallInst>(I)) {
          if (CI->getCalledFunction() == Selector32Fn ||
              CI->getCalledFunction() == Selector64Fn) {
            if (!PersonalityFn) PersonalityFn = CI->getOperand(2);
            EH_Selectors.push_back(CI);
          } else if (CI->getCalledFunction() == ExceptionFn) {
            EH_Exceptions.push_back(CI);
          }
        }
      }
    }
    // The result of the eh.selector call will be replaced with a reference to
    // the selector value returned in the function context. We leave the
    // selector itself so the EH analysis later can use it.
    for (int i = 0, e = EH_Selectors.size(); i < e; ++i) {
      CallInst *I = EH_Selectors[i];
      Value *SelectorVal = new LoadInst(SelectorAddr, "select_val", true, I);
      I->replaceAllUsesWith(SelectorVal);
    }
    // eh.exception calls are replaced with references to the proper
    // location in the context. Unlike eh.selector, the eh.exception
    // calls are removed entirely.
    for (int i = 0, e = EH_Exceptions.size(); i < e; ++i) {
      CallInst *I = EH_Exceptions[i];
      // Possible for there to be duplicates, so check to make sure
      // the instruction hasn't already been removed.
      if (!I->getParent()) continue;
      Value *Val = new LoadInst(ExceptionAddr, "exception", true, I);
      Type *Ty = PointerType::getUnqual(Type::getInt8Ty(F.getContext()));
      Val = CastInst::Create(Instruction::IntToPtr, Val, Ty, "", I);

      I->replaceAllUsesWith(Val);
      I->eraseFromParent();
    }
    // The entry block changes to have the eh.sjlj.setjmp, with a conditional
    // branch to a dispatch block for non-zero returns. If we return normally,
    // we're not handling an exception and just register the function context
    // and continue.
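    // Roughly, using the value and block names created below:
    //   entry:
    //     %fcn_context = alloca ...               ; the function context
    //     ...store the LSDA, personality and frame pointer into the context...
    //     %dispatch = call i32 @llvm.eh.sjlj.setjmp(i8* %jbuf)
    //     %notunwind = icmp eq i32 %dispatch, 0
    //     br i1 %notunwind, label %eh.sjlj.setjmp.cont,
    //                       label %eh.sjlj.setjmp.catch
    //   eh.sjlj.setjmp.catch:                     ; the dispatch block
    //     %invoke.num = load volatile i32* %call_site
    //     switch i32 %invoke.num, label %unwindbb [ ...one case per pad... ]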

    // Create the dispatch block. The dispatch block is basically a big switch
    // statement that goes to all of the invoke landing pads.
    BasicBlock *DispatchBlock =
      BasicBlock::Create(F.getContext(), "eh.sjlj.setjmp.catch", &F);

    // Insert a load in the Catch block, and a switch on its value. By default,
    // we go to a block that just does an unwind (which is the correct action
    // for a standard call).
    BasicBlock *UnwindBlock =
      BasicBlock::Create(F.getContext(), "unwindbb", &F);
    Unwinds.push_back(new UnwindInst(F.getContext(), UnwindBlock));

    Value *DispatchLoad = new LoadInst(CallSite, "invoke.num", true,
                                       DispatchBlock);
    SwitchInst *DispatchSwitch =
      SwitchInst::Create(DispatchLoad, UnwindBlock, Invokes.size(),
                         DispatchBlock);
    // Split the entry block to insert the conditional branch for the setjmp.
    BasicBlock *ContBlock = EntryBB->splitBasicBlock(EntryBB->getTerminator(),
                                                     "eh.sjlj.setjmp.cont");

    // Populate the Function Context
    //   1. LSDA address
    //   2. Personality function address
    //   3. jmpbuf (save FP and call eh.sjlj.setjmp)

    // LSDA address
    Idxs[0] = Zero;
    Idxs[1] = ConstantInt::get(Int32Ty, 4);
    Value *LSDAFieldPtr =
      GetElementPtrInst::Create(FunctionContext, Idxs, Idxs+2,
                                "lsda_gep",
                                EntryBB->getTerminator());
    Value *LSDA = CallInst::Create(LSDAAddrFn, "lsda_addr",
                                   EntryBB->getTerminator());
    new StoreInst(LSDA, LSDAFieldPtr, true, EntryBB->getTerminator());

    // Personality function address
    Idxs[1] = ConstantInt::get(Int32Ty, 3);
    Value *PersonalityFieldPtr =
      GetElementPtrInst::Create(FunctionContext, Idxs, Idxs+2,
                                "personality_gep",
                                EntryBB->getTerminator());
    new StoreInst(PersonalityFn, PersonalityFieldPtr, true,
                  EntryBB->getTerminator());

    // Save the frame pointer.
    Idxs[1] = ConstantInt::get(Int32Ty, 5);
    Value *FieldPtr =
      GetElementPtrInst::Create(FunctionContext, Idxs, Idxs+2,
                                "jbuf_gep",
                                EntryBB->getTerminator());
    Idxs[1] = ConstantInt::get(Int32Ty, 0);
    Value *ElemPtr =
      GetElementPtrInst::Create(FieldPtr, Idxs, Idxs+2, "jbuf_fp_gep",
                                EntryBB->getTerminator());

    Value *Val = CallInst::Create(FrameAddrFn,
                                  ConstantInt::get(Int32Ty, 0),
                                  "fp",
                                  EntryBB->getTerminator());
    new StoreInst(Val, ElemPtr, true, EntryBB->getTerminator());
    // Call the setjmp intrinsic. It fills in the rest of the jmpbuf.
    Value *SetjmpArg =
      CastInst::Create(Instruction::BitCast, FieldPtr,
                       Type::getInt8Ty(F.getContext())->getPointerTo(), "",
                       EntryBB->getTerminator());
    Value *DispatchVal = CallInst::Create(BuiltinSetjmpFn, SetjmpArg,
                                          "dispatch",
                                          EntryBB->getTerminator());
    // Check the return value of the setjmp. Non-zero goes to the dispatcher.
    Value *IsNormal = new ICmpInst(EntryBB->getTerminator(),
                                   ICmpInst::ICMP_EQ, DispatchVal, Zero,
                                   "notunwind");
    // Nuke the uncond branch.
    EntryBB->getTerminator()->eraseFromParent();

    // Put in a new condbranch in its place.
    BranchInst::Create(ContBlock, DispatchBlock, IsNormal, EntryBB);

    // Register the function context and make sure it's known to not throw
    CallInst *Register =
      CallInst::Create(RegisterFn, FunctionContext, "",
                       ContBlock->getTerminator());
    Register->setDoesNotThrow();

    // At this point, we are all set up. Update the invoke instructions
    // to mark their call_site values, and fill in the dispatch switch
    // accordingly.
    DenseMap<BasicBlock*,unsigned> PadSites;
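    // PadSites remembers the call-site number already assigned to a landing
    // pad, so invokes that share a landing pad reuse the same switch case.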
    unsigned NextCallSiteValue = 1;
    for (SmallVector<InvokeInst*,16>::iterator I = Invokes.begin(),
           E = Invokes.end(); I != E; ++I) {
      unsigned CallSiteValue;
      BasicBlock *LandingPad = (*I)->getSuccessor(1);
      // landing pads can be shared. If we see a landing pad again, we
      // want to make sure to use the same call site index so the dispatch
      // will go to the right place.
      CallSiteValue = PadSites[LandingPad];
      if (!CallSiteValue) {
        CallSiteValue = NextCallSiteValue++;
        PadSites[LandingPad] = CallSiteValue;
        // Add a switch case to our unwind block. The runtime comes back
        // to the dispatcher with the call_site - 1 in the context. Odd,
        // but there it is.
        ConstantInt *SwitchValC =
          ConstantInt::get(Type::getInt32Ty((*I)->getContext()),
                           CallSiteValue - 1);
        DispatchSwitch->addCase(SwitchValC, (*I)->getUnwindDest());
      }
      markInvokeCallSite(*I, CallSiteValue, CallSite);
    }

    // The front end has likely added calls to _Unwind_SjLj_Resume. We need
    // to find those calls and mark the call_site as -1 immediately prior.
    // Resume is a noreturn function, so any block that has a call to it
    // should end in an 'unreachable' instruction with the call immediately
    // prior. That's how we'll search.
    // ??? There's got to be a better way. This is fugly.
    for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB)
      if (isa<UnreachableInst>(BB->getTerminator())) {
        BasicBlock::iterator I = BB->getTerminator();
        // Check the previous instruction and see if it's a resume call.
        if (I == BB->begin()) continue;
        if (CallInst *CI = dyn_cast<CallInst>(--I)) {
          if (CI->getCalledFunction() == ResumeFn) {
            Value *NegativeOne = Constant::getAllOnesValue(Int32Ty);
            new StoreInst(NegativeOne, CallSite, true, I);  // volatile
          }
        }
      }

    // Replace all unwinds with a branch to the unwind handler.
    // ??? Should this ever happen with sjlj exceptions?
    for (unsigned i = 0, e = Unwinds.size(); i != e; ++i) {
      BranchInst::Create(UnwindBlock, Unwinds[i]);
      Unwinds[i]->eraseFromParent();
    }

    // Scan the whole function for values that are live across unwind edges.
    // Each value that is live across an unwind edge is spilled into a stack
    // slot, guaranteeing that there is nothing live across the unwind edge.
    // This process also splits all critical edges coming out of invokes.
    splitLiveRangesLiveAcrossInvokes(Invokes);

    // Finally, for any returns from this function, if this function contains
    // an invoke, add a call to unregister the function context.
    for (unsigned i = 0, e = Returns.size(); i != e; ++i)
      CallInst::Create(UnregisterFn, FunctionContext, "", Returns[i]);
  }

  return true;
}

bool SjLjEHPass::runOnFunction(Function &F) {
  bool Res = insertSjLjEHSupport(F);
  return Res;
}