//===-- DwarfEHPrepare - Prepare exception handling for code generation ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass mulches exception handling code into a form adapted to code
// generation. Required if using DWARF exception handling.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "dwarfehprepare"
#include "llvm/Function.h"
#include "llvm/Instructions.h"
#include "llvm/IntrinsicInst.h"
#include "llvm/Module.h"
#include "llvm/Pass.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/Dominators.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"
using namespace llvm;

STATISTIC(NumLandingPadsSplit,     "Number of landing pads split");
STATISTIC(NumUnwindsLowered,       "Number of unwind instructions lowered");
STATISTIC(NumExceptionValuesMoved, "Number of eh.exception calls moved");
STATISTIC(NumStackTempsIntroduced, "Number of stack temporaries introduced");

namespace {
  class DwarfEHPrepare : public FunctionPass {
    const TargetLowering *TLI;
    bool CompileFast;

    // The eh.exception intrinsic.
    Function *ExceptionValueIntrinsic;

    // The eh.selector intrinsic.
    Function *SelectorIntrinsic;

    // _Unwind_Resume_or_Rethrow call.
    Constant *URoR;

    // The EH language-specific catch-all type.
    GlobalVariable *EHCatchAllValue;

    // _Unwind_Resume or the target equivalent.
    Constant *RewindFunction;

    // Dominator info is used when turning stack temporaries into registers.
    DominatorTree *DT;
    DominanceFrontier *DF;

    // The function we are running on.
    Function *F;

    // The landing pads for this function.
    typedef SmallPtrSet<BasicBlock*, 8> BBSet;
    BBSet LandingPads;

    // Stack temporary used to hold eh.exception values.
    AllocaInst *ExceptionValueVar;

    bool NormalizeLandingPads();
    bool LowerUnwinds();
    bool MoveExceptionValueCalls();
    bool FinishStackTemporaries();
    bool PromoteStackTemporaries();

    Instruction *CreateExceptionValueCall(BasicBlock *BB);
    Instruction *CreateValueLoad(BasicBlock *BB);

    /// CreateReadOfExceptionValue - Return the result of the eh.exception
    /// intrinsic by calling the intrinsic if in a landing pad, or loading it
    /// from the exception value variable otherwise.
    Instruction *CreateReadOfExceptionValue(BasicBlock *BB) {
      return LandingPads.count(BB) ?
             CreateExceptionValueCall(BB) : CreateValueLoad(BB);
    }

    /// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow"
    /// calls. The "unwind" part of these invokes jumps to a landing pad within
    /// the current function, which makes each such invoke a candidate for
    /// merging the selector associated with the URoR invoke with the one from
    /// the URoR's landing pad.
    bool HandleURoRInvokes();

    /// FindSelectorAndURoR - Find the eh.selector call and URoR call
    /// associated with the eh.exception call. This recursively looks past
    /// instructions which don't change the EH pointer value, like casts or
    /// PHI nodes.
    bool FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
                             SmallPtrSet<IntrinsicInst*, 8> &SelCalls);

    /// DoMem2RegPromotion - Take an alloca and promote it from memory to a
    /// register.
    bool DoMem2RegPromotion(Value *V) {
      AllocaInst *AI = dyn_cast<AllocaInst>(V);
      if (!AI || !isAllocaPromotable(AI)) return false;

      // Turn the alloca into a register.
      std::vector<AllocaInst*> Allocas(1, AI);
      PromoteMemToReg(Allocas, *DT, *DF);
      return true;
    }

    /// PromoteStoreInst - Perform Mem2Reg on a StoreInst.
    bool PromoteStoreInst(StoreInst *SI) {
      if (!SI || !DT || !DF) return false;
      return DoMem2RegPromotion(SI->getOperand(1));
    }

    /// PromoteEHPtrStore - Promote the storing of an EH pointer into a
    /// register. This should get rid of the store and subsequent loads.
    bool PromoteEHPtrStore(IntrinsicInst *II) {
      if (!DT || !DF) return false;

      bool Changed = false;
      StoreInst *SI;

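      // Repeatedly pick a store of the EH pointer and promote the alloca it
      // stores to; promotion can change the use list, so rescan from scratch
      // until no more stores can be promoted.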
      while (1) {
        SI = 0;
        for (Value::use_iterator
               I = II->use_begin(), E = II->use_end(); I != E; ++I) {
          SI = dyn_cast<StoreInst>(I);
          if (SI) break;
        }

        if (!PromoteStoreInst(SI))
          break;

        Changed = true;
      }

      return Changed;
    }

  public:
    static char ID; // Pass identification, replacement for typeid.
    DwarfEHPrepare(const TargetLowering *tli, bool fast) :
      FunctionPass(&ID), TLI(tli), CompileFast(fast),
      ExceptionValueIntrinsic(0), SelectorIntrinsic(0),
      URoR(0), EHCatchAllValue(0), RewindFunction(0) {}

    virtual bool runOnFunction(Function &Fn);

    // getAnalysisUsage - We need dominance frontiers for memory promotion.
    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      if (!CompileFast)
        AU.addRequired<DominatorTree>();
      AU.addPreserved<DominatorTree>();
      if (!CompileFast)
        AU.addRequired<DominanceFrontier>();
      AU.addPreserved<DominanceFrontier>();
    }

    const char *getPassName() const {
      return "Exception handling preparation";
    }

  };
} // end anonymous namespace

char DwarfEHPrepare::ID = 0;

FunctionPass *llvm::createDwarfEHPass(const TargetLowering *tli, bool fast) {
  return new DwarfEHPrepare(tli, fast);
}

/// FindSelectorAndURoR - Find the eh.selector call associated with the
/// eh.exception call, and indicate whether a URoR "invoke" is associated with
/// that call. This recursively looks past instructions which don't change the
/// EH pointer value, like casts or PHI nodes.
bool
DwarfEHPrepare::FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke,
                                    SmallPtrSet<IntrinsicInst*, 8> &SelCalls) {
  SmallPtrSet<PHINode*, 32> SeenPHIs;
  bool Changed = false;

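  // Walk the uses of the eh.exception value, looking through casts and PHI
  // nodes, collecting any eh.selector calls and noting whether the value also
  // reaches an invoke of URoR.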
 restart:
  for (Value::use_iterator
         I = Inst->use_begin(), E = Inst->use_end(); I != E; ++I) {
    Instruction *II = dyn_cast<Instruction>(I);
    if (!II || II->getParent()->getParent() != F) continue;

    if (IntrinsicInst *Sel = dyn_cast<IntrinsicInst>(II)) {
      if (Sel->getIntrinsicID() == Intrinsic::eh_selector)
        SelCalls.insert(Sel);
    } else if (InvokeInst *Invoke = dyn_cast<InvokeInst>(II)) {
      if (Invoke->getCalledFunction() == URoR)
        URoRInvoke = true;
    } else if (CastInst *CI = dyn_cast<CastInst>(II)) {
      Changed |= FindSelectorAndURoR(CI, URoRInvoke, SelCalls);
    } else if (StoreInst *SI = dyn_cast<StoreInst>(II)) {
      if (!PromoteStoreInst(SI)) continue;
      Changed = true;
      SeenPHIs.clear();
      goto restart;             // Uses may have changed, restart loop.
    } else if (PHINode *PN = dyn_cast<PHINode>(II)) {
      if (SeenPHIs.insert(PN))
        // Don't process a PHI node more than once.
        Changed |= FindSelectorAndURoR(PN, URoRInvoke, SelCalls);
    }
  }

  return Changed;
}

/// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow" calls.
/// The "unwind" part of these invokes jumps to a landing pad within the
/// current function, which makes each such invoke a candidate for merging the
/// selector associated with the URoR invoke with the one from the URoR's
/// landing pad.
bool DwarfEHPrepare::HandleURoRInvokes() {
  if (!EHCatchAllValue) {
    EHCatchAllValue =
      F->getParent()->getNamedGlobal(".llvm.eh.catch.all.value");
    if (!EHCatchAllValue) return false;
  }

  if (!URoR) {
    URoR = F->getParent()->getFunction("_Unwind_Resume_or_Rethrow");
    if (!URoR) return false;
  }

  if (!ExceptionValueIntrinsic) {
    ExceptionValueIntrinsic =
      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_exception);
    if (!ExceptionValueIntrinsic) return false;
  }

  if (!SelectorIntrinsic) {
    SelectorIntrinsic =
      Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector);
    if (!SelectorIntrinsic) return false;
  }

  bool Changed = false;
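  // Clean-up selectors that will need to be rewritten as catch-alls.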
  SmallPtrSet<IntrinsicInst*, 32> SelsToConvert;

  for (Value::use_iterator
         I = ExceptionValueIntrinsic->use_begin(),
         E = ExceptionValueIntrinsic->use_end(); I != E; ++I) {
    IntrinsicInst *EHPtr = dyn_cast<IntrinsicInst>(I);
    if (!EHPtr || EHPtr->getParent()->getParent() != F) continue;

    Changed |= PromoteEHPtrStore(EHPtr);

    bool URoRInvoke = false;
    SmallPtrSet<IntrinsicInst*, 8> SelCalls;
    Changed |= FindSelectorAndURoR(EHPtr, URoRInvoke, SelCalls);

    if (URoRInvoke) {
      // This EH pointer is being used by an invoke of an URoR instruction and
      // an eh.selector intrinsic call. If the eh.selector is a 'clean-up', we
      // need to convert it to a 'catch-all'.
      for (SmallPtrSet<IntrinsicInst*, 8>::iterator
             SI = SelCalls.begin(), SE = SelCalls.end(); SI != SE; ++SI) {
        IntrinsicInst *II = *SI;
        unsigned NumOps = II->getNumOperands();

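        // Operand 0 of an eh.selector call is the callee; operands 1 and 2
        // are the exception pointer and the personality function, and any
        // further operands are typeinfo clauses. Three operands therefore
        // means the selector has no clauses, and a lone zero clause likewise
        // marks a clean-up.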
        if (NumOps <= 4) {
          bool IsCleanUp = (NumOps == 3);

          if (!IsCleanUp)
            if (ConstantInt *CI = dyn_cast<ConstantInt>(II->getOperand(3)))
              IsCleanUp = (CI->getZExtValue() == 0);

          if (IsCleanUp)
            SelsToConvert.insert(II);
        }
      }
    }
  }

  if (!SelsToConvert.empty()) {
    // Convert all clean-up eh.selectors, which are associated with "invokes"
    // of URoR calls, into catch-all eh.selectors.
    Changed = true;

    for (SmallPtrSet<IntrinsicInst*, 8>::iterator
           SI = SelsToConvert.begin(), SE = SelsToConvert.end();
         SI != SE; ++SI) {
      IntrinsicInst *II = *SI;
      SmallVector<Value*, 8> Args;

      // Use the exception object pointer and the personality function
      // from the original selector.
      Args.push_back(II->getOperand(1)); // Exception object pointer.
      Args.push_back(II->getOperand(2)); // Personality function.
      Args.push_back(EHCatchAllValue->getInitializer()); // Catch-all indicator.

      CallInst *NewSelector =
        CallInst::Create(SelectorIntrinsic, Args.begin(), Args.end(),
                         "eh.sel.catch.all", II);

      NewSelector->setTailCall(II->isTailCall());
      NewSelector->setAttributes(II->getAttributes());
      NewSelector->setCallingConv(II->getCallingConv());

      II->replaceAllUsesWith(NewSelector);
      II->eraseFromParent();
    }
  }

  return Changed;
}

/// NormalizeLandingPads - Normalize and discover landing pads, noting them
/// in the LandingPads set. A landing pad is normal if the only CFG edges
/// that end at it are unwind edges from invoke instructions. If we inlined
/// through an invoke we could have a normal branch from the previous
/// unwind block through to the landing pad for the original invoke.
/// Abnormal landing pads are fixed up by redirecting all unwind edges to
/// a new basic block which falls through to the original.
bool DwarfEHPrepare::NormalizeLandingPads() {
  bool Changed = false;

  const MCAsmInfo *MAI = TLI->getTargetMachine().getMCAsmInfo();
  bool usingSjLjEH = MAI->getExceptionHandlingType() == ExceptionHandling::SjLj;

  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
    TerminatorInst *TI = I->getTerminator();
    if (!isa<InvokeInst>(TI))
      continue;
    BasicBlock *LPad = TI->getSuccessor(1);
    // Skip landing pads that have already been normalized.
    if (LandingPads.count(LPad))
      continue;

    // Check that only invoke unwind edges end at the landing pad.
    bool OnlyUnwoundTo = true;
    bool SwitchOK = usingSjLjEH;
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad);
         PI != PE; ++PI) {
      TerminatorInst *PT = (*PI)->getTerminator();
      // The SjLj dispatch block uses a switch instruction. This is effectively
      // an unwind edge, so we can disregard it here. There will only ever
      // be one dispatch, however, so if there are multiple switches, one
      // of them truly is a normal edge, not an unwind edge.
      if (SwitchOK && isa<SwitchInst>(PT)) {
        SwitchOK = false;
        continue;
      }
      if (!isa<InvokeInst>(PT) || LPad == PT->getSuccessor(0)) {
        OnlyUnwoundTo = false;
        break;
      }
    }

    if (OnlyUnwoundTo) {
      // Only unwind edges lead to the landing pad. Remember the landing pad.
      LandingPads.insert(LPad);
      continue;
    }

    // At least one normal edge ends at the landing pad. Redirect the unwind
    // edges to a new basic block which falls through into this one.

    // Create the new basic block.
    BasicBlock *NewBB = BasicBlock::Create(F->getContext(),
                                           LPad->getName() + "_unwind_edge");

    // Insert it into the function right before the original landing pad.
    LPad->getParent()->getBasicBlockList().insert(LPad, NewBB);

    // Redirect unwind edges from the original landing pad to NewBB.
    for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad); PI != PE; ) {
      TerminatorInst *PT = (*PI++)->getTerminator();
      if (isa<InvokeInst>(PT) && PT->getSuccessor(1) == LPad)
        // Unwind to the new block.
        PT->setSuccessor(1, NewBB);
    }

    // If there are any PHI nodes in LPad, we need to update them so that they
    // merge incoming values from NewBB instead.
    for (BasicBlock::iterator II = LPad->begin(); isa<PHINode>(II); ++II) {
      PHINode *PN = cast<PHINode>(II);
      pred_iterator PB = pred_begin(NewBB), PE = pred_end(NewBB);

      // Check to see if all of the values coming in via unwind edges are the
      // same. If so, we don't need to create a new PHI node.
      Value *InVal = PN->getIncomingValueForBlock(*PB);
      for (pred_iterator PI = PB; PI != PE; ++PI) {
        if (PI != PB && InVal != PN->getIncomingValueForBlock(*PI)) {
          InVal = 0;
          break;
        }
      }

      if (InVal == 0) {
        // Different unwind edges have different values. Create a new PHI node
        // in NewBB.
        PHINode *NewPN = PHINode::Create(PN->getType(), PN->getName()+".unwind",
                                         NewBB);
        // Add an entry for each unwind edge, using the value from the old PHI.
        for (pred_iterator PI = PB; PI != PE; ++PI)
          NewPN->addIncoming(PN->getIncomingValueForBlock(*PI), *PI);

        // Now use this new PHI as the common incoming value for NewBB in PN.
        InVal = NewPN;
      }

      // Revector exactly one entry in the PHI node to come from NewBB
      // and delete all other entries that come from unwind edges. If
      // there are both normal and unwind edges from the same predecessor,
      // this leaves an entry for the normal edge.
      for (pred_iterator PI = PB; PI != PE; ++PI)
        PN->removeIncomingValue(*PI);
      PN->addIncoming(InVal, NewBB);
    }

    // Add a fallthrough from NewBB to the original landing pad.
    BranchInst::Create(LPad, NewBB);

    // Now update DominatorTree and DominanceFrontier analysis information.
    if (DT)
      DT->splitBlock(NewBB);
    if (DF)
      DF->splitBlock(NewBB);

    // Remember the newly constructed landing pad. The original landing pad
    // LPad is no longer a landing pad now that all unwind edges have been
    // revectored to NewBB.
    LandingPads.insert(NewBB);
    ++NumLandingPadsSplit;
    Changed = true;
  }

  return Changed;
}

/// LowerUnwinds - Turn unwind instructions into calls to _Unwind_Resume,
/// rethrowing any previously caught exception. This will crash horribly
/// at runtime if there is no such exception: using unwind to throw a new
/// exception is currently not supported.
bool DwarfEHPrepare::LowerUnwinds() {
  SmallVector<TerminatorInst*, 16> UnwindInsts;

  for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) {
    TerminatorInst *TI = I->getTerminator();
    if (isa<UnwindInst>(TI))
      UnwindInsts.push_back(TI);
  }

  if (UnwindInsts.empty()) return false;

  // Find the rewind function if we didn't already.
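  // It takes the exception object as a single i8* argument and is declared
  // here with a void result.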
  if (!RewindFunction) {
    LLVMContext &Ctx = UnwindInsts[0]->getContext();
    std::vector<const Type*>
      Params(1, Type::getInt8PtrTy(Ctx));
    FunctionType *FTy = FunctionType::get(Type::getVoidTy(Ctx),
                                          Params, false);
    const char *RewindName = TLI->getLibcallName(RTLIB::UNWIND_RESUME);
    RewindFunction = F->getParent()->getOrInsertFunction(RewindName, FTy);
  }

  bool Changed = false;

  for (SmallVectorImpl<TerminatorInst*>::iterator
         I = UnwindInsts.begin(), E = UnwindInsts.end(); I != E; ++I) {
    TerminatorInst *TI = *I;

    // Replace the unwind instruction with a call to _Unwind_Resume (or the
    // appropriate target equivalent) followed by an UnreachableInst.

    // Create the call...
    CallInst *CI = CallInst::Create(RewindFunction,
                                    CreateReadOfExceptionValue(TI->getParent()),
                                    "", TI);
    CI->setCallingConv(TLI->getLibcallCallingConv(RTLIB::UNWIND_RESUME));
    // ...followed by an UnreachableInst.
    new UnreachableInst(TI->getContext(), TI);

    // Nuke the unwind instruction.
    TI->eraseFromParent();
    ++NumUnwindsLowered;
    Changed = true;
  }

  return Changed;
}

/// MoveExceptionValueCalls - Ensure that eh.exception is only ever called from
/// landing pads by replacing calls outside of landing pads with loads from a
/// stack temporary. Move eh.exception calls inside landing pads to the start
/// of the landing pad (optional, but may make things simpler for later
/// passes).
bool DwarfEHPrepare::MoveExceptionValueCalls() {
  // If the eh.exception intrinsic is not declared in the module then there is
  // nothing to do. Speed up compilation by checking for this common case.
  if (!ExceptionValueIntrinsic &&
      !F->getParent()->getFunction(Intrinsic::getName(Intrinsic::eh_exception)))
    return false;

  bool Changed = false;

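  // Visit every instruction. Any eh.exception call that is not already the
  // canonical call at the start of a landing pad has its uses replaced with a
  // fresh read of the exception value (a call in a landing pad, a load of the
  // stack temporary elsewhere) and is then deleted.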
  for (Function::iterator BB = F->begin(), E = F->end(); BB != E; ++BB) {
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E;)
      if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++))
        if (CI->getIntrinsicID() == Intrinsic::eh_exception) {
          if (!CI->use_empty()) {
            Value *ExceptionValue = CreateReadOfExceptionValue(BB);
            if (CI == ExceptionValue) {
              // The call was at the start of a landing pad - leave it alone.
              assert(LandingPads.count(BB) &&
                     "Created eh.exception call outside landing pad!");
              continue;
            }
            CI->replaceAllUsesWith(ExceptionValue);
          }
          CI->eraseFromParent();
          ++NumExceptionValuesMoved;
          Changed = true;
        }
  }

  return Changed;
}

/// FinishStackTemporaries - If we introduced a stack variable to hold the
/// exception value then initialize it in each landing pad.
bool DwarfEHPrepare::FinishStackTemporaries() {
  if (!ExceptionValueVar)
    // Nothing to do.
    return false;

  bool Changed = false;

  // Make sure that there is a store of the exception value at the start of
  // each landing pad.
  for (BBSet::iterator LI = LandingPads.begin(), LE = LandingPads.end();
       LI != LE; ++LI) {
    Instruction *ExceptionValue = CreateReadOfExceptionValue(*LI);
    Instruction *Store = new StoreInst(ExceptionValue, ExceptionValueVar);
    Store->insertAfter(ExceptionValue);
    Changed = true;
  }

  return Changed;
}

/// PromoteStackTemporaries - Turn any stack temporaries we introduced into
/// registers if possible.
bool DwarfEHPrepare::PromoteStackTemporaries() {
  if (ExceptionValueVar && DT && DF && isAllocaPromotable(ExceptionValueVar)) {
    // Turn the exception temporary into registers and phi nodes if possible.
    std::vector<AllocaInst*> Allocas(1, ExceptionValueVar);
    PromoteMemToReg(Allocas, *DT, *DF);
    return true;
  }
  return false;
}

/// CreateExceptionValueCall - Insert a call to the eh.exception intrinsic at
/// the start of the basic block (unless there already is one, in which case
/// the existing call is returned).
Instruction *DwarfEHPrepare::CreateExceptionValueCall(BasicBlock *BB) {
  Instruction *Start = BB->getFirstNonPHI();
  // Is this a call to eh.exception?
  if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(Start))
    if (CI->getIntrinsicID() == Intrinsic::eh_exception)
      // Reuse the existing call.
      return Start;

  // Find the eh.exception intrinsic if we didn't already.
  if (!ExceptionValueIntrinsic)
    ExceptionValueIntrinsic = Intrinsic::getDeclaration(F->getParent(),
                                                        Intrinsic::eh_exception);

  // Create the call.
  return CallInst::Create(ExceptionValueIntrinsic, "eh.value.call", Start);
}

/// CreateValueLoad - Insert a load of the exception value stack variable
/// (creating it if necessary) at the start of the basic block (unless
/// there already is a load, in which case the existing load is returned).
Instruction *DwarfEHPrepare::CreateValueLoad(BasicBlock *BB) {
  Instruction *Start = BB->getFirstNonPHI();
  // Is this a load of the exception temporary?
  if (ExceptionValueVar)
    if (LoadInst* LI = dyn_cast<LoadInst>(Start))
      if (LI->getPointerOperand() == ExceptionValueVar)
        // Reuse the existing load.
        return Start;

  // Create the temporary if we didn't already.
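  // The alloca is placed at the start of the entry block so that it
  // dominates all uses and can later be promoted to a register by
  // PromoteStackTemporaries.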
  if (!ExceptionValueVar) {
    ExceptionValueVar = new AllocaInst(PointerType::getUnqual(
          Type::getInt8Ty(BB->getContext())), "eh.value", F->begin()->begin());
    ++NumStackTempsIntroduced;
  }

  // Load the value.
  return new LoadInst(ExceptionValueVar, "eh.value.load", Start);
}

bool DwarfEHPrepare::runOnFunction(Function &Fn) {
  bool Changed = false;

  // Initialize internal state.
  DT = getAnalysisIfAvailable<DominatorTree>();
  DF = getAnalysisIfAvailable<DominanceFrontier>();
  ExceptionValueVar = 0;
  F = &Fn;

  // Ensure that only unwind edges end at landing pads (a landing pad is a
  // basic block where an invoke unwind edge ends).
  Changed |= NormalizeLandingPads();

  // Turn unwind instructions into libcalls.
  Changed |= LowerUnwinds();

  // TODO: Move eh.selector calls to landing pads and combine them.

  // Move eh.exception calls to landing pads.
  Changed |= MoveExceptionValueCalls();

  // Initialize any stack temporaries we introduced.
  Changed |= FinishStackTemporaries();

  // Turn any stack temporaries into registers if possible.
  if (!CompileFast)
    Changed |= PromoteStackTemporaries();

  Changed |= HandleURoRInvokes();

  LandingPads.clear();

  return Changed;
}