//===-- JITEmitter.cpp - Write machine code to executable memory ----------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines a MachineCodeEmitter object that is used by the JIT to
// write machine code to memory and remember where relocatable values are.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "jit"
#include "JIT.h"
#include "JITDwarfEmitter.h"
#include "llvm/Constant.h"
#include "llvm/Module.h"
#include "llvm/Type.h"
#include "llvm/CodeGen/MachineCodeEmitter.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineConstantPool.h"
#include "llvm/CodeGen/MachineJumpTableInfo.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/MachineRelocation.h"
#include "llvm/ExecutionEngine/JITMemoryManager.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Target/TargetJITInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MutexGuard.h"
#include "llvm/System/Disassembler.h"
#include "llvm/ADT/Statistic.h"
#include <algorithm>
using namespace llvm;

STATISTIC(NumBytes, "Number of bytes of machine code compiled");
STATISTIC(NumRelos, "Number of relocations applied");
static JIT *TheJIT = 0;


//===----------------------------------------------------------------------===//
// JIT lazy compilation code.
//
namespace {
  class JITResolverState {
  private:
    /// FunctionToStubMap - Keep track of the stub created for a particular
    /// function so that we can reuse them if necessary.
    std::map<Function*, void*> FunctionToStubMap;

    /// StubToFunctionMap - Keep track of the function that each stub
    /// corresponds to.
    std::map<void*, Function*> StubToFunctionMap;

    /// GlobalToLazyPtrMap - Keep track of the lazy pointer created for a
    /// particular GlobalVariable so that we can reuse them if necessary.
    std::map<GlobalValue*, void*> GlobalToLazyPtrMap;

  public:
    std::map<Function*, void*>& getFunctionToStubMap(const MutexGuard& locked) {
      assert(locked.holds(TheJIT->lock));
      return FunctionToStubMap;
    }

    std::map<void*, Function*>& getStubToFunctionMap(const MutexGuard& locked) {
      assert(locked.holds(TheJIT->lock));
      return StubToFunctionMap;
    }

    std::map<GlobalValue*, void*>&
    getGlobalToLazyPtrMap(const MutexGuard& locked) {
      assert(locked.holds(TheJIT->lock));
      return GlobalToLazyPtrMap;
    }
  };

  /// JITResolver - Keep track of, and resolve, call sites for functions that
  /// have not yet been compiled.
  class JITResolver {
    /// LazyResolverFn - The target lazy resolver function that we actually
    /// rewrite instructions to use.
    TargetJITInfo::LazyResolverFn LazyResolverFn;

    JITResolverState state;

    /// ExternalFnToStubMap - This is the equivalent of FunctionToStubMap for
    /// external functions.
    std::map<void*, void*> ExternalFnToStubMap;

    // Map addresses to indexes in the GOT.
    std::map<void*, unsigned> revGOTMap;
    unsigned nextGOTIndex;

    static JITResolver *TheJITResolver;
  public:
    JITResolver(JIT &jit) : nextGOTIndex(0) {
      TheJIT = &jit;

      LazyResolverFn = jit.getJITInfo().getLazyResolverFunction(JITCompilerFn);
      assert(TheJITResolver == 0 && "Multiple JIT resolvers?");
      TheJITResolver = this;
    }

    ~JITResolver() {
      TheJITResolver = 0;
    }

    /// getFunctionStub - This returns a pointer to a function stub, creating
    /// one on demand as needed.
    void *getFunctionStub(Function *F);

    /// getExternalFunctionStub - Return a stub for the function at the
    /// specified address, created lazily on demand.
    void *getExternalFunctionStub(void *FnAddr);

    /// getGlobalValueLazyPtr - Return a lazy pointer containing the specified
    /// GV address.
    void *getGlobalValueLazyPtr(GlobalValue *V, void *GVAddress);

    /// AddCallbackAtLocation - If the target is capable of rewriting an
    /// instruction without the use of a stub, record the location of the use
    /// so we know which function is being used at the location.
    void *AddCallbackAtLocation(Function *F, void *Location) {
      MutexGuard locked(TheJIT->lock);
      /// Get the target-specific JIT resolver function.
      state.getStubToFunctionMap(locked)[Location] = F;
      return (void*)(intptr_t)LazyResolverFn;
    }

    /// getGOTIndexForAddr - Return a new or existing index in the GOT for
    /// an address. This function only manages slots, it does not manage the
    /// contents of the slots or the memory associated with the GOT.
    unsigned getGOTIndexForAddr(void *addr);

    /// JITCompilerFn - This function is called to resolve a stub to a compiled
    /// address. If the LLVM Function corresponding to the stub has not yet
    /// been compiled, this function compiles it first.
    static void *JITCompilerFn(void *Stub);
  };
}

JITResolver *JITResolver::TheJITResolver = 0;

#if (defined(__POWERPC__) || defined (__ppc__) || defined(_POWER)) && \
    defined(__APPLE__)
extern "C" void sys_icache_invalidate(const void *Addr, size_t len);
#endif

/// synchronizeICache - On some targets, the JIT emitted code must be
/// explicitly refetched to ensure correct execution.
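/// On Darwin/PowerPC, for example, the instruction cache is not kept coherent
/// with data writes, so freshly emitted code must be flushed before it runs.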
static void synchronizeICache(const void *Addr, size_t len) {
#if (defined(__POWERPC__) || defined (__ppc__) || defined(_POWER)) && \
    defined(__APPLE__)
  sys_icache_invalidate(Addr, len);
#endif
}

/// getFunctionStub - This returns a pointer to a function stub, creating
/// one on demand as needed.
void *JITResolver::getFunctionStub(Function *F) {
  MutexGuard locked(TheJIT->lock);

  // If we already have a stub for this function, recycle it.
  void *&Stub = state.getFunctionToStubMap(locked)[F];
  if (Stub) return Stub;

  // Call the lazy resolver function unless we already KNOW it is an external
  // function, in which case we just skip the lazy resolution step.
  void *Actual = (void*)(intptr_t)LazyResolverFn;
  if (F->isDeclaration() && !F->hasNotBeenReadFromBitcode())
    Actual = TheJIT->getPointerToFunction(F);

  // Otherwise, codegen a new stub. For now, the stub will call the lazy
  // resolver function.
  Stub = TheJIT->getJITInfo().emitFunctionStub(Actual,
                                               *TheJIT->getCodeEmitter());

  if (Actual != (void*)(intptr_t)LazyResolverFn) {
    // If we are getting the stub for an external function, we really want the
    // address of the stub in the GlobalAddressMap for the JIT, not the address
    // of the external function.
    TheJIT->updateGlobalMapping(F, Stub);
  }

  DOUT << "JIT: Stub emitted at [" << Stub << "] for function '"
       << F->getName() << "'\n";

  // Finally, keep track of the stub-to-Function mapping so that the
  // JITCompilerFn knows which function to compile!
  state.getStubToFunctionMap(locked)[Stub] = F;
  return Stub;
}

/// getGlobalValueLazyPtr - Return a lazy pointer containing the specified
/// GV address.
void *JITResolver::getGlobalValueLazyPtr(GlobalValue *GV, void *GVAddress) {
  MutexGuard locked(TheJIT->lock);

  // If we already have a lazy pointer for this global value, recycle it.
  void *&LazyPtr = state.getGlobalToLazyPtrMap(locked)[GV];
  if (LazyPtr) return LazyPtr;

  // Otherwise, codegen a new lazy pointer.
  LazyPtr = TheJIT->getJITInfo().emitGlobalValueLazyPtr(GVAddress,
                                                     *TheJIT->getCodeEmitter());

  DOUT << "JIT: Lazy pointer emitted at [" << LazyPtr << "] for GV '"
       << GV->getName() << "'\n";

  return LazyPtr;
}

/// getExternalFunctionStub - Return a stub for the function at the
/// specified address, created lazily on demand.
void *JITResolver::getExternalFunctionStub(void *FnAddr) {
  // If we already have a stub for this function, recycle it.
  void *&Stub = ExternalFnToStubMap[FnAddr];
  if (Stub) return Stub;

  Stub = TheJIT->getJITInfo().emitFunctionStub(FnAddr,
                                               *TheJIT->getCodeEmitter());

  DOUT << "JIT: Stub emitted at [" << Stub
       << "] for external function at '" << FnAddr << "'\n";
  return Stub;
}

unsigned JITResolver::getGOTIndexForAddr(void* addr) {
  unsigned idx = revGOTMap[addr];
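  // A value of zero means 'addr' has no GOT slot yet: operator[] just
  // default-constructed the entry, so real GOT indices start at 1.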
  if (!idx) {
    idx = ++nextGOTIndex;
    revGOTMap[addr] = idx;
    DOUT << "Adding GOT entry " << idx
         << " for addr " << addr << "\n";
  }
  return idx;
}

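// Overview of lazy compilation: a call site initially targets a stub emitted
// by getFunctionStub. When executed, the stub transfers control to the
// target's lazy resolver, which calls JITCompilerFn with the address of the
// stub. JITCompilerFn compiles the corresponding Function (if it has not been
// compiled already) and returns the real address, so the resolver can patch
// the call and resume execution in the compiled code.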
/// JITCompilerFn - This function is called when a lazy compilation stub has
/// been entered. It looks up which function this stub corresponds to, compiles
/// it if necessary, then returns the resultant function pointer.
void *JITResolver::JITCompilerFn(void *Stub) {
  JITResolver &JR = *TheJITResolver;

  MutexGuard locked(TheJIT->lock);

  // The address given to us for the stub may not be exactly right, it might be
  // a little bit after the stub. As such, use upper_bound to find it.
  std::map<void*, Function*>::iterator I =
    JR.state.getStubToFunctionMap(locked).upper_bound(Stub);
  assert(I != JR.state.getStubToFunctionMap(locked).begin() &&
         "This is not a known stub!");
  Function *F = (--I)->second;

  // If we have already code generated the function, just return the address.
  void *Result = TheJIT->getPointerToGlobalIfAvailable(F);

  if (!Result) {
    // Otherwise we don't have it, do lazy compilation now.

    // If lazy compilation is disabled, emit a useful error message and abort.
    if (TheJIT->isLazyCompilationDisabled()) {
      cerr << "LLVM JIT requested to do lazy compilation of function '"
           << F->getName() << "' when lazy compiles are disabled!\n";
      abort();
    }

    // We might like to remove the stub from the StubToFunction map here, but
    // we can't: multiple threads could be stuck waiting to acquire the lock
    // above. As soon as the first thread finishes compiling the function, the
    // next one will be released, and it still needs to be able to look up the
    // function it has to call.
    //JR.state.getStubToFunctionMap(locked).erase(I);

    DOUT << "JIT: Lazily resolving function '" << F->getName()
         << "' In stub ptr = " << Stub << " actual ptr = "
         << I->first << "\n";

    Result = TheJIT->getPointerToFunction(F);
  }

  // We don't need to reuse this stub in the future, as F is now compiled.
  JR.state.getFunctionToStubMap(locked).erase(F);

  // FIXME: We could rewrite all references to this stub if we knew them.

  // What we will do is set the compiled function address to map to the
  // same GOT entry as the stub so that later clients may update the GOT
  // if they see it still using the stub address.
  // Note: this is done so the Resolver doesn't have to manage GOT memory.
  // Do this without allocating map space if the target isn't using a GOT.
  if (JR.revGOTMap.find(Stub) != JR.revGOTMap.end())
    JR.revGOTMap[Result] = JR.revGOTMap[Stub];

  return Result;
}


//===----------------------------------------------------------------------===//
// JITEmitter code.
//
namespace {
  /// JITEmitter - The JIT implementation of the MachineCodeEmitter, which is
  /// used to output functions to memory for execution.
  class JITEmitter : public MachineCodeEmitter {
    JITMemoryManager *MemMgr;

    // When outputting a function stub in the context of some other function,
    // we save BufferBegin/BufferEnd/CurBufferPtr here.
    unsigned char *SavedBufferBegin, *SavedBufferEnd, *SavedCurBufferPtr;

    /// Relocations - These are the relocations that the function needs, as
    /// emitted.
    std::vector<MachineRelocation> Relocations;

    /// MBBLocations - This vector is a mapping from MBB ID's to their address.
    /// It is filled in by the StartMachineBasicBlock callback and queried by
    /// the getMachineBasicBlockAddress callback.
    std::vector<intptr_t> MBBLocations;

    /// ConstantPool - The constant pool for the current function.
    ///
    MachineConstantPool *ConstantPool;

    /// ConstantPoolBase - A pointer to the first entry in the constant pool.
    ///
    void *ConstantPoolBase;

    /// JumpTable - The jump tables for the current function.
    ///
    MachineJumpTableInfo *JumpTable;

    /// JumpTableBase - A pointer to the first entry in the jump table.
    ///
    void *JumpTableBase;

    /// Resolver - This contains info about the currently resolved functions.
    JITResolver Resolver;

    /// DE - The dwarf emitter for the jit.
    JITDwarfEmitter *DE;

    /// LabelLocations - This vector is a mapping from Label ID's to their
    /// address.
    std::vector<intptr_t> LabelLocations;

    /// MMI - Machine module info for exception information.
    MachineModuleInfo* MMI;

  public:
    JITEmitter(JIT &jit, JITMemoryManager *JMM)
      : Resolver(jit), DE(0), MMI(0) {
      MemMgr = JMM ? JMM : JITMemoryManager::CreateDefaultMemManager();
      if (jit.getJITInfo().needsGOT()) {
        MemMgr->AllocateGOT();
        DOUT << "JIT is managing a GOT\n";
      }

      if (ExceptionHandling) DE = new JITDwarfEmitter(jit);
    }
    ~JITEmitter() {
      delete MemMgr;
      if (ExceptionHandling) delete DE;
    }

    JITResolver &getJITResolver() { return Resolver; }

    virtual void startFunction(MachineFunction &F);
    virtual bool finishFunction(MachineFunction &F);

    void emitConstantPool(MachineConstantPool *MCP);
    void initJumpTableInfo(MachineJumpTableInfo *MJTI);
    void emitJumpTableInfo(MachineJumpTableInfo *MJTI);

    virtual void startFunctionStub(unsigned StubSize, unsigned Alignment = 1);
    virtual void* finishFunctionStub(const Function *F);

    virtual void addRelocation(const MachineRelocation &MR) {
      Relocations.push_back(MR);
    }

    virtual void StartMachineBasicBlock(MachineBasicBlock *MBB) {
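      // Grow the vector with some slack (2x the highest block number seen) so
      // that repeated calls do not reallocate for every new basic block.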
      if (MBBLocations.size() <= (unsigned)MBB->getNumber())
        MBBLocations.resize((MBB->getNumber()+1)*2);
      MBBLocations[MBB->getNumber()] = getCurrentPCValue();
    }

    virtual intptr_t getConstantPoolEntryAddress(unsigned Entry) const;
    virtual intptr_t getJumpTableEntryAddress(unsigned Entry) const;

    virtual intptr_t getMachineBasicBlockAddress(MachineBasicBlock *MBB) const {
      assert(MBBLocations.size() > (unsigned)MBB->getNumber() &&
             MBBLocations[MBB->getNumber()] && "MBB not emitted!");
      return MBBLocations[MBB->getNumber()];
    }

    /// deallocateMemForFunction - Deallocate all memory for the specified
    /// function body.
    void deallocateMemForFunction(Function *F) {
      MemMgr->deallocateMemForFunction(F);
    }

    virtual void emitLabel(uint64_t LabelID) {
      if (LabelLocations.size() <= LabelID)
        LabelLocations.resize((LabelID+1)*2);
      LabelLocations[LabelID] = getCurrentPCValue();
    }

    virtual intptr_t getLabelAddress(uint64_t LabelID) const {
      assert(LabelLocations.size() > (unsigned)LabelID &&
             LabelLocations[LabelID] && "Label not emitted!");
      return LabelLocations[LabelID];
    }

    virtual void setModuleInfo(MachineModuleInfo* Info) {
      MMI = Info;
      if (ExceptionHandling) DE->setModuleInfo(Info);
    }

  private:
    void *getPointerToGlobal(GlobalValue *GV, void *Reference, bool NoNeedStub);
    void *getPointerToGVLazyPtr(GlobalValue *V, void *Reference,
                                bool NoNeedStub);
  };
}

void *JITEmitter::getPointerToGlobal(GlobalValue *V, void *Reference,
                                     bool DoesntNeedStub) {
  if (GlobalVariable *GV = dyn_cast<GlobalVariable>(V)) {
    /// FIXME: If we straightened things out, this could actually emit the
    /// global immediately instead of queuing it for codegen later!
    return TheJIT->getOrEmitGlobalVariable(GV);
  }

  // If we have already compiled the function, return a pointer to its body.
  Function *F = cast<Function>(V);
  void *ResultPtr = TheJIT->getPointerToGlobalIfAvailable(F);
  if (ResultPtr) return ResultPtr;

  if (F->isDeclaration() && !F->hasNotBeenReadFromBitcode()) {
    // If this is an external function pointer, we can force the JIT to
    // 'compile' it, which really just adds it to the map.
    if (DoesntNeedStub)
      return TheJIT->getPointerToFunction(F);

    return Resolver.getFunctionStub(F);
  }

  // Okay, the function has not been compiled yet, if the target callback
  // mechanism is capable of rewriting the instruction directly, prefer to do
  // that instead of emitting a stub.
  if (DoesntNeedStub)
    return Resolver.AddCallbackAtLocation(F, Reference);

  // Otherwise, we have to emit a lazy resolving stub.
  return Resolver.getFunctionStub(F);
}

void *JITEmitter::getPointerToGVLazyPtr(GlobalValue *V, void *Reference,
                                        bool DoesntNeedStub) {
  // Make sure GV is emitted first.
  // FIXME: For now, if the GV is an external function we force the JIT to
  // compile it so the lazy pointer will contain the fully resolved address.
  void *GVAddress = getPointerToGlobal(V, Reference, true);
  return Resolver.getGlobalValueLazyPtr(V, GVAddress);
}


void JITEmitter::startFunction(MachineFunction &F) {
  uintptr_t ActualSize;
  BufferBegin = CurBufferPtr = MemMgr->startFunctionBody(F.getFunction(),
                                                         ActualSize);
  BufferEnd = BufferBegin+ActualSize;

  // Ensure the constant pool/jump table info is at least 4-byte aligned; we
  // conservatively align to 16 bytes here.
  emitAlignment(16);

  emitConstantPool(F.getConstantPool());
  initJumpTableInfo(F.getJumpTableInfo());

  // About to start emitting the machine code for the function.
  emitAlignment(std::max(F.getFunction()->getAlignment(), 8U));
  TheJIT->updateGlobalMapping(F.getFunction(), CurBufferPtr);

  MBBLocations.clear();
}

bool JITEmitter::finishFunction(MachineFunction &F) {
  if (CurBufferPtr == BufferEnd) {
    // FIXME: Allocate more space, then try again.
    cerr << "JIT: Ran out of space for generated machine code!\n";
    abort();
  }

  emitJumpTableInfo(F.getJumpTableInfo());

  // FnStart is the start of the text, not the start of the constant pool and
  // other per-function data.
  unsigned char *FnStart =
    (unsigned char *)TheJIT->getPointerToGlobalIfAvailable(F.getFunction());
  unsigned char *FnEnd = CurBufferPtr;

  MemMgr->endFunctionBody(F.getFunction(), BufferBegin, FnEnd);
  NumBytes += FnEnd-FnStart;

  if (!Relocations.empty()) {
    NumRelos += Relocations.size();

    // Resolve the relocations to concrete pointers.
    for (unsigned i = 0, e = Relocations.size(); i != e; ++i) {
      MachineRelocation &MR = Relocations[i];
      void *ResultPtr;
      if (MR.isString()) {
        ResultPtr = TheJIT->getPointerToNamedFunction(MR.getString());

        // If the target REALLY wants a stub for this function, emit it now.
        if (!MR.doesntNeedStub())
          ResultPtr = Resolver.getExternalFunctionStub(ResultPtr);
      } else if (MR.isGlobalValue()) {
        ResultPtr = getPointerToGlobal(MR.getGlobalValue(),
                                       BufferBegin+MR.getMachineCodeOffset(),
                                       MR.doesntNeedStub());
      } else if (MR.isGlobalValueLazyPtr()) {
        ResultPtr = getPointerToGVLazyPtr(MR.getGlobalValue(),
                                          BufferBegin+MR.getMachineCodeOffset(),
                                          MR.doesntNeedStub());
      } else if (MR.isBasicBlock()) {
        ResultPtr = (void*)getMachineBasicBlockAddress(MR.getBasicBlock());
      } else if (MR.isConstantPoolIndex()) {
        ResultPtr=(void*)getConstantPoolEntryAddress(MR.getConstantPoolIndex());
      } else {
        assert(MR.isJumpTableIndex());
        ResultPtr=(void*)getJumpTableEntryAddress(MR.getJumpTableIndex());
      }

      MR.setResultPointer(ResultPtr);

      // If we are managing the GOT and the relocation wants an index,
      // give it one.
      if (MR.isGOTRelative() && MemMgr->isManagingGOT()) {
        unsigned idx = Resolver.getGOTIndexForAddr(ResultPtr);
        MR.setGOTIndex(idx);
        if (((void**)MemMgr->getGOTBase())[idx] != ResultPtr) {
          DOUT << "GOT was out of date for " << ResultPtr
               << " pointing at " << ((void**)MemMgr->getGOTBase())[idx]
               << "\n";
          ((void**)MemMgr->getGOTBase())[idx] = ResultPtr;
        }
      }
    }

    TheJIT->getJITInfo().relocate(BufferBegin, &Relocations[0],
                                  Relocations.size(), MemMgr->getGOTBase());
  }

  // Update the GOT entry for F to point to the new code.
  if (MemMgr->isManagingGOT()) {
    unsigned idx = Resolver.getGOTIndexForAddr((void*)BufferBegin);
    if (((void**)MemMgr->getGOTBase())[idx] != (void*)BufferBegin) {
      DOUT << "GOT was out of date for " << (void*)BufferBegin
           << " pointing at " << ((void**)MemMgr->getGOTBase())[idx] << "\n";
      ((void**)MemMgr->getGOTBase())[idx] = (void*)BufferBegin;
    }
  }

  // Invalidate the icache if necessary.
  synchronizeICache(FnStart, FnEnd-FnStart);

  DOUT << "JIT: Finished CodeGen of [" << (void*)FnStart
       << "] Function: " << F.getFunction()->getName()
       << ": " << (FnEnd-FnStart) << " bytes of text, "
       << Relocations.size() << " relocations\n";
  Relocations.clear();

#ifndef NDEBUG
  if (sys::hasDisassembler())
    DOUT << "Disassembled code:\n"
         << sys::disassembleBuffer(FnStart, FnEnd-FnStart, (uintptr_t)FnStart);
#endif
  if (ExceptionHandling) {
    uintptr_t ActualSize;
    SavedBufferBegin = BufferBegin;
    SavedBufferEnd = BufferEnd;
    SavedCurBufferPtr = CurBufferPtr;

    BufferBegin = CurBufferPtr = MemMgr->startExceptionTable(F.getFunction(),
                                                             ActualSize);
    BufferEnd = BufferBegin+ActualSize;
    unsigned char* FrameRegister = DE->EmitDwarfTable(F, *this, FnStart, FnEnd);
    MemMgr->endExceptionTable(F.getFunction(), BufferBegin, CurBufferPtr,
                              FrameRegister);
    BufferBegin = SavedBufferBegin;
    BufferEnd = SavedBufferEnd;
    CurBufferPtr = SavedCurBufferPtr;

    TheJIT->RegisterTable(FrameRegister);
  }
  if (MMI) MMI->EndFunction();

  return false;
}

void JITEmitter::emitConstantPool(MachineConstantPool *MCP) {
  const std::vector<MachineConstantPoolEntry> &Constants = MCP->getConstants();
  if (Constants.empty()) return;

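  // The constant pool is laid out contiguously, so its total size is the
  // offset of the last entry plus the size of that entry's type.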
  MachineConstantPoolEntry CPE = Constants.back();
  unsigned Size = CPE.Offset;
  const Type *Ty = CPE.isMachineConstantPoolEntry()
    ? CPE.Val.MachineCPVal->getType() : CPE.Val.ConstVal->getType();
  Size += TheJIT->getTargetData()->getABITypeSize(Ty);

  ConstantPoolBase = allocateSpace(Size, 1 << MCP->getConstantPoolAlignment());
  ConstantPool = MCP;

  if (ConstantPoolBase == 0) return;  // Buffer overflow.

  // Initialize the memory for all of the constant pool entries.
  for (unsigned i = 0, e = Constants.size(); i != e; ++i) {
    void *CAddr = (char*)ConstantPoolBase+Constants[i].Offset;
    if (Constants[i].isMachineConstantPoolEntry()) {
      // FIXME: add support to lower machine constant pool values into bytes!
      cerr << "Initialize memory with machine specific constant pool entry"
           << " has not been implemented!\n";
      abort();
    }
    TheJIT->InitializeMemory(Constants[i].Val.ConstVal, CAddr);
  }
}

void JITEmitter::initJumpTableInfo(MachineJumpTableInfo *MJTI) {
  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  if (JT.empty()) return;

  unsigned NumEntries = 0;
  for (unsigned i = 0, e = JT.size(); i != e; ++i)
    NumEntries += JT[i].MBBs.size();

  unsigned EntrySize = MJTI->getEntrySize();

  // Just allocate space for all the jump tables now.  We will fix up the
  // actual MBB entries in the tables after we emit the code for each block,
  // since then we will know the final locations of the MBBs in memory.
  JumpTable = MJTI;
  JumpTableBase = allocateSpace(NumEntries * EntrySize, MJTI->getAlignment());
}

void JITEmitter::emitJumpTableInfo(MachineJumpTableInfo *MJTI) {
  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  if (JT.empty() || JumpTableBase == 0) return;

  if (TargetMachine::getRelocationModel() == Reloc::PIC_) {
    assert(MJTI->getEntrySize() == 4 && "Cross JIT'ing?");
    // For each jump table, place the offset from the beginning of the table
    // to the target address.
    int *SlotPtr = (int*)JumpTableBase;

    for (unsigned i = 0, e = JT.size(); i != e; ++i) {
      const std::vector<MachineBasicBlock*> &MBBs = JT[i].MBBs;
      // Store the offset of the basic block for this jump table slot in the
      // memory we allocated for the jump table in 'initJumpTableInfo'.
      intptr_t Base = (intptr_t)SlotPtr;
      for (unsigned mi = 0, me = MBBs.size(); mi != me; ++mi) {
        intptr_t MBBAddr = getMachineBasicBlockAddress(MBBs[mi]);
        *SlotPtr++ = TheJIT->getJITInfo().getPICJumpTableEntry(MBBAddr, Base);
      }
    }
  } else {
    assert(MJTI->getEntrySize() == sizeof(void*) && "Cross JIT'ing?");

    // For each jump table, map each target in the jump table to the address of
    // an emitted MachineBasicBlock.
    intptr_t *SlotPtr = (intptr_t*)JumpTableBase;

    for (unsigned i = 0, e = JT.size(); i != e; ++i) {
      const std::vector<MachineBasicBlock*> &MBBs = JT[i].MBBs;
      // Store the address of the basic block for this jump table slot in the
      // memory we allocated for the jump table in 'initJumpTableInfo'.
      for (unsigned mi = 0, me = MBBs.size(); mi != me; ++mi)
        *SlotPtr++ = getMachineBasicBlockAddress(MBBs[mi]);
    }
  }
}

void JITEmitter::startFunctionStub(unsigned StubSize, unsigned Alignment) {
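  // A stub may be requested while we are in the middle of emitting another
  // function, so stash the current buffer state and switch to a fresh stub
  // allocation; finishFunctionStub restores the saved state.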
  SavedBufferBegin = BufferBegin;
  SavedBufferEnd = BufferEnd;
  SavedCurBufferPtr = CurBufferPtr;

  BufferBegin = CurBufferPtr = MemMgr->allocateStub(StubSize, Alignment);
  BufferEnd = BufferBegin+StubSize+1;
}

void *JITEmitter::finishFunctionStub(const Function *F) {
  NumBytes += getCurrentPCOffset();
  std::swap(SavedBufferBegin, BufferBegin);
  BufferEnd = SavedBufferEnd;
  CurBufferPtr = SavedCurBufferPtr;
  return SavedBufferBegin;
}

// getConstantPoolEntryAddress - Return the address of the 'ConstantNum' entry
// in the constant pool that was last emitted with the 'emitConstantPool'
// method.
//
intptr_t JITEmitter::getConstantPoolEntryAddress(unsigned ConstantNum) const {
  assert(ConstantNum < ConstantPool->getConstants().size() &&
         "Invalid ConstantPoolIndex!");
  return (intptr_t)ConstantPoolBase +
         ConstantPool->getConstants()[ConstantNum].Offset;
}

// getJumpTableEntryAddress - Return the address of the JumpTable with index
// 'Index' in the jump table that was last initialized with 'initJumpTableInfo'.
//
intptr_t JITEmitter::getJumpTableEntryAddress(unsigned Index) const {
  const std::vector<MachineJumpTableEntry> &JT = JumpTable->getJumpTables();
  assert(Index < JT.size() && "Invalid jump table index!");

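  // Jump tables are emitted back to back into JumpTableBase, so the address
  // of table 'Index' is the base plus the number of entries in all preceding
  // tables times the entry size.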
  unsigned Offset = 0;
  unsigned EntrySize = JumpTable->getEntrySize();

  for (unsigned i = 0; i < Index; ++i)
    Offset += JT[i].MBBs.size();

  Offset *= EntrySize;

  return (intptr_t)((char *)JumpTableBase + Offset);
}

//===----------------------------------------------------------------------===//
//  Public interface to this file
//===----------------------------------------------------------------------===//

MachineCodeEmitter *JIT::createEmitter(JIT &jit, JITMemoryManager *JMM) {
  return new JITEmitter(jit, JMM);
}

// getPointerToNamedFunction - This function is used as a global wrapper to
// JIT::getPointerToNamedFunction for the purpose of resolving symbols when
// bugpoint is debugging the JIT. In that scenario, we are loading an .so and
// need to resolve function(s) that are being mis-codegenerated, so we need to
// resolve their addresses at runtime, and this is the way to do it.
extern "C" {
  void *getPointerToNamedFunction(const char *Name) {
    if (Function *F = TheJIT->FindFunctionNamed(Name))
      return TheJIT->getPointerToFunction(F);
    return TheJIT->getPointerToNamedFunction(Name);
  }
}

// getPointerToFunctionOrStub - If the specified function has been
// code-gen'd, return a pointer to the function.  If not, compile it, or use
// a stub to implement lazy compilation if available.
//
void *JIT::getPointerToFunctionOrStub(Function *F) {
  // If we have already code generated the function, just return the address.
  if (void *Addr = getPointerToGlobalIfAvailable(F))
    return Addr;

  // Get a stub if the target supports it.
  assert(dynamic_cast<JITEmitter*>(MCE) && "Unexpected MCE?");
  JITEmitter *JE = static_cast<JITEmitter*>(getCodeEmitter());
  return JE->getJITResolver().getFunctionStub(F);
}

/// freeMachineCodeForFunction - release machine code memory for given Function.
///
void JIT::freeMachineCodeForFunction(Function *F) {
  // Delete translation for this from the ExecutionEngine, so it will get
  // retranslated next time it is used.
  updateGlobalMapping(F, 0);

  // Free the actual memory for the function body and related stuff.
  assert(dynamic_cast<JITEmitter*>(MCE) && "Unexpected MCE?");
  static_cast<JITEmitter*>(MCE)->deallocateMemForFunction(F);
}