//===-- GCRootLowering.cpp - Garbage collection infrastructure -----------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the lowering for the gc.root mechanism.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/GCStrategy.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetFrameLowering.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"

using namespace llvm;

namespace {

/// LowerIntrinsics - This pass rewrites calls to the llvm.gcread or
/// llvm.gcwrite intrinsics, replacing them with simple loads and stores as
/// directed by the GCStrategy. It also performs automatic root initialization
/// and custom intrinsic lowering.
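///
/// As an illustrative sketch (the IR below is schematic, with made-up value
/// names, not taken from a test case): under the default barrier lowering a
/// write barrier such as
///   call void @llvm.gcwrite(i8* %v, i8* %obj, i8** %slot)
/// becomes a plain store
///   store i8* %v, i8** %slot
/// and a read barrier such as
///   %r = call i8* @llvm.gcread(i8* %obj, i8** %slot)
/// becomes a plain load
///   %r = load i8** %slot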
class LowerIntrinsics : public FunctionPass {
  static bool NeedsDefaultLoweringPass(const GCStrategy &C);
  static bool NeedsCustomLoweringPass(const GCStrategy &C);
  static bool CouldBecomeSafePoint(Instruction *I);
  bool PerformDefaultLowering(Function &F, GCStrategy &Coll);
  static bool InsertRootInitializers(Function &F, AllocaInst **Roots,
                                     unsigned Count);

public:
  static char ID;

  LowerIntrinsics();
  const char *getPassName() const override;
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool doInitialization(Module &M) override;
  bool runOnFunction(Function &F) override;
};

/// GCMachineCodeAnalysis - This is a target-independent pass over the machine
/// function representation to identify safe points for the garbage collector
/// in the machine code. It inserts labels at safe points and populates a
/// GCMetadata record for each function.
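///
/// As an illustrative sketch (machine IR shown schematically; the call opcode
/// and label names are target- and context-specific): for a strategy that
/// requests both pre- and post-call safe points, each call is bracketed as
///   GC_LABEL <symbol Ltmp0>     ; PreCall safe point
///   <target call instruction>
///   GC_LABEL <symbol Ltmp1>     ; PostCall safe point
/// with both labels recorded in the function's GCFunctionInfo.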
class GCMachineCodeAnalysis : public MachineFunctionPass {
  const TargetMachine *TM;
  GCFunctionInfo *FI;
  MachineModuleInfo *MMI;
  const TargetInstrInfo *TII;

  void FindSafePoints(MachineFunction &MF);
  void VisitCallPoint(MachineBasicBlock::iterator MI);
  MCSymbol *InsertLabel(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                        DebugLoc DL) const;

  void FindStackOffsets(MachineFunction &MF);

public:
  static char ID;

  GCMachineCodeAnalysis();
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction &MF) override;
};
}

// -----------------------------------------------------------------------------

INITIALIZE_PASS_BEGIN(LowerIntrinsics, "gc-lowering", "GC Lowering", false,
                      false)
INITIALIZE_PASS_DEPENDENCY(GCModuleInfo)
INITIALIZE_PASS_END(LowerIntrinsics, "gc-lowering", "GC Lowering", false, false)

FunctionPass *llvm::createGCLoweringPass() {
  return new LowerIntrinsics();
}

char LowerIntrinsics::ID = 0;

LowerIntrinsics::LowerIntrinsics() : FunctionPass(ID) {
  initializeLowerIntrinsicsPass(*PassRegistry::getPassRegistry());
}

const char *LowerIntrinsics::getPassName() const {
  return "Lower Garbage Collection Instructions";
}

void LowerIntrinsics::getAnalysisUsage(AnalysisUsage &AU) const {
  FunctionPass::getAnalysisUsage(AU);
  AU.addRequired<GCModuleInfo>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

/// doInitialization - If this module uses the GC intrinsics, find them now.
bool LowerIntrinsics::doInitialization(Module &M) {
  // FIXME: This is rather antisocial in the context of a JIT since it performs
  //        work against the entire module. But this cannot be done at
  //        runFunction time (initializeCustomLowering likely needs to change
  //        the module).
  GCModuleInfo *MI = getAnalysisIfAvailable<GCModuleInfo>();
  assert(MI && "LowerIntrinsics didn't require GCModuleInfo!?");
  for (Module::iterator I = M.begin(), E = M.end(); I != E; ++I)
    if (!I->isDeclaration() && I->hasGC())
      MI->getFunctionInfo(*I); // Instantiate the GC strategy.

  bool MadeChange = false;
  for (GCModuleInfo::iterator I = MI->begin(), E = MI->end(); I != E; ++I)
    if (NeedsCustomLoweringPass(**I))
      if ((*I)->initializeCustomLowering(M))
        MadeChange = true;

  return MadeChange;
}

bool LowerIntrinsics::InsertRootInitializers(Function &F, AllocaInst **Roots,
                                             unsigned Count) {
  // Scroll past alloca instructions.
  BasicBlock::iterator IP = F.getEntryBlock().begin();
  while (isa<AllocaInst>(IP))
    ++IP;

  // Search for initializers in the initial BB.
  SmallPtrSet<AllocaInst *, 16> InitedRoots;
  for (; !CouldBecomeSafePoint(IP); ++IP)
    if (StoreInst *SI = dyn_cast<StoreInst>(IP))
      if (AllocaInst *AI =
              dyn_cast<AllocaInst>(SI->getOperand(1)->stripPointerCasts()))
        InitedRoots.insert(AI);

  // Add root initializers.
  bool MadeChange = false;

  for (AllocaInst **I = Roots, **E = Roots + Count; I != E; ++I)
    if (!InitedRoots.count(*I)) {
      StoreInst *SI = new StoreInst(
          ConstantPointerNull::get(cast<PointerType>(
              cast<PointerType>((*I)->getType())->getElementType())),
          *I);
      SI->insertAfter(*I);
      MadeChange = true;
    }

  return MadeChange;
}

bool LowerIntrinsics::NeedsDefaultLoweringPass(const GCStrategy &C) {
  // Default lowering is necessary only if read or write barriers have a
  // default action, or if roots require automatic initialization.
  return !C.customWriteBarrier() || !C.customReadBarrier() ||
         C.initializeRoots();
}

bool LowerIntrinsics::NeedsCustomLoweringPass(const GCStrategy &C) {
  // Custom lowering is only necessary if enabled for some action.
  return C.customWriteBarrier() || C.customReadBarrier() || C.customRoots();
}

/// CouldBecomeSafePoint - Predicate to conservatively determine whether the
/// instruction could introduce a safe point.
bool LowerIntrinsics::CouldBecomeSafePoint(Instruction *I) {
  // The natural definition of instructions which could introduce safe points
  // are:
  //
  //   - call, invoke (AfterCall, BeforeCall)
  //   - phis (Loops)
  //   - invoke, ret, unwind (Exit)
  //
  // However, instructions as seemingly innocuous as arithmetic can become
  // libcalls upon lowering (e.g., div i64 on a 32-bit platform), so it is
  // necessary to take a conservative approach.

  if (isa<AllocaInst>(I) || isa<GetElementPtrInst>(I) || isa<StoreInst>(I) ||
      isa<LoadInst>(I))
    return false;

  // llvm.gcroot is safe because it doesn't do anything at runtime.
  if (CallInst *CI = dyn_cast<CallInst>(I))
    if (Function *F = CI->getCalledFunction())
      if (unsigned IID = F->getIntrinsicID())
        if (IID == Intrinsic::gcroot)
          return false;

  return true;
}

/// runOnFunction - Replace gcread/gcwrite intrinsics with loads and stores.
/// Leave gcroot intrinsics; the code generator needs to see those.
bool LowerIntrinsics::runOnFunction(Function &F) {
  // Quick exit for functions that do not use GC.
  if (!F.hasGC())
    return false;

  GCFunctionInfo &FI = getAnalysis<GCModuleInfo>().getFunctionInfo(F);
  GCStrategy &S = FI.getStrategy();

  bool MadeChange = false;

  if (NeedsDefaultLoweringPass(S))
    MadeChange |= PerformDefaultLowering(F, S);

  bool UseCustomLoweringPass = NeedsCustomLoweringPass(S);
  if (UseCustomLoweringPass)
    MadeChange |= S.performCustomLowering(F);

  // Custom lowering may modify the CFG, so dominators must be recomputed.
  if (UseCustomLoweringPass) {
    if (DominatorTreeWrapperPass *DTWP =
            getAnalysisIfAvailable<DominatorTreeWrapperPass>())
      DTWP->getDomTree().recalculate(F);
  }

  return MadeChange;
}

bool LowerIntrinsics::PerformDefaultLowering(Function &F, GCStrategy &S) {
  bool LowerWr = !S.customWriteBarrier();
  bool LowerRd = !S.customReadBarrier();
  bool InitRoots = S.initializeRoots();

  SmallVector<AllocaInst *, 32> Roots;

  bool MadeChange = false;
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB) {
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E;) {
      if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++)) {
        Function *F = CI->getCalledFunction();
        switch (F->getIntrinsicID()) {
        case Intrinsic::gcwrite:
          if (LowerWr) {
            // Replace a write barrier with a simple store.
            Value *St =
                new StoreInst(CI->getArgOperand(0), CI->getArgOperand(2), CI);
            CI->replaceAllUsesWith(St);
            CI->eraseFromParent();
          }
          break;
        case Intrinsic::gcread:
          if (LowerRd) {
            // Replace a read barrier with a simple load.
            Value *Ld = new LoadInst(CI->getArgOperand(1), "", CI);
            Ld->takeName(CI);
            CI->replaceAllUsesWith(Ld);
            CI->eraseFromParent();
          }
          break;
        case Intrinsic::gcroot:
          if (InitRoots) {
            // Initialize the GC root, but do not delete the intrinsic. The
            // backend needs the intrinsic to flag the stack slot.
            Roots.push_back(
                cast<AllocaInst>(CI->getArgOperand(0)->stripPointerCasts()));
          }
          break;
        default:
          continue;
        }

        MadeChange = true;
      }
    }
  }

  if (Roots.size())
    MadeChange |= InsertRootInitializers(F, Roots.begin(), Roots.size());

  return MadeChange;
}

// -----------------------------------------------------------------------------

char GCMachineCodeAnalysis::ID = 0;
char &llvm::GCMachineCodeAnalysisID = GCMachineCodeAnalysis::ID;

INITIALIZE_PASS(GCMachineCodeAnalysis, "gc-analysis",
                "Analyze Machine Code For Garbage Collection", false, false)

GCMachineCodeAnalysis::GCMachineCodeAnalysis() : MachineFunctionPass(ID) {}

void GCMachineCodeAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  MachineFunctionPass::getAnalysisUsage(AU);
  AU.setPreservesAll();
  AU.addRequired<MachineModuleInfo>();
  AU.addRequired<GCModuleInfo>();
}

MCSymbol *GCMachineCodeAnalysis::InsertLabel(MachineBasicBlock &MBB,
                                             MachineBasicBlock::iterator MI,
                                             DebugLoc DL) const {
  MCSymbol *Label = MBB.getParent()->getContext().CreateTempSymbol();
  BuildMI(MBB, MI, DL, TII->get(TargetOpcode::GC_LABEL)).addSym(Label);
  return Label;
}

void GCMachineCodeAnalysis::VisitCallPoint(MachineBasicBlock::iterator CI) {
  // Find the return address (next instruction), too, so as to bracket the
  // call instruction.
  MachineBasicBlock::iterator RAI = CI;
  ++RAI;

  if (FI->getStrategy().needsSafePoint(GC::PreCall)) {
    MCSymbol *Label = InsertLabel(*CI->getParent(), CI, CI->getDebugLoc());
    FI->addSafePoint(GC::PreCall, Label, CI->getDebugLoc());
  }

  if (FI->getStrategy().needsSafePoint(GC::PostCall)) {
    MCSymbol *Label = InsertLabel(*CI->getParent(), RAI, CI->getDebugLoc());
    FI->addSafePoint(GC::PostCall, Label, CI->getDebugLoc());
  }
}

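/// FindSafePoints - Visit every call instruction in the machine function and
/// record the safe points requested by the strategy.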
void GCMachineCodeAnalysis::FindSafePoints(MachineFunction &MF) {
  for (MachineFunction::iterator BBI = MF.begin(), BBE = MF.end(); BBI != BBE;
       ++BBI)
    for (MachineBasicBlock::iterator MI = BBI->begin(), ME = BBI->end();
         MI != ME; ++MI)
      if (MI->isCall())
        VisitCallPoint(MI);
}

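/// FindStackOffsets - Record the frame offset of every live GC root, dropping
/// roots whose stack slots have been marked dead.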
void GCMachineCodeAnalysis::FindStackOffsets(MachineFunction &MF) {
  const TargetFrameLowering *TFI = TM->getSubtargetImpl()->getFrameLowering();
  assert(TFI && "TargetFrameLowering not available!");

  for (GCFunctionInfo::roots_iterator RI = FI->roots_begin();
       RI != FI->roots_end();) {
    // If the root references a dead object, no need to keep it.
    if (MF.getFrameInfo()->isDeadObjectIndex(RI->Num)) {
      RI = FI->removeStackRoot(RI);
    } else {
      RI->StackOffset = TFI->getFrameIndexOffset(MF, RI->Num);
      ++RI;
    }
  }
}

bool GCMachineCodeAnalysis::runOnMachineFunction(MachineFunction &MF) {
  // Quick exit for functions that do not use GC.
  if (!MF.getFunction()->hasGC())
    return false;

  FI = &getAnalysis<GCModuleInfo>().getFunctionInfo(*MF.getFunction());
  if (!FI->getStrategy().needsSafePoints())
    return false;

  TM = &MF.getTarget();
  MMI = &getAnalysis<MachineModuleInfo>();
  TII = TM->getSubtargetImpl()->getInstrInfo();

  // Find the size of the stack frame.
  FI->setFrameSize(MF.getFrameInfo()->getStackSize());

  // Find all safe points.
  if (FI->getStrategy().customSafePoints()) {
    FI->getStrategy().findCustomSafePoints(*FI, MF);
  } else {
    FindSafePoints(MF);
  }

  // Find the stack offsets for all roots.
  FindStackOffsets(MF);

  return false;
}