//===-- CGCleanup.h - Classes for cleanups IR generation --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// These classes support the generation of LLVM IR for cleanups.
//
//===----------------------------------------------------------------------===//

#ifndef CLANG_CODEGEN_CGCLEANUP_H
#define CLANG_CODEGEN_CGCLEANUP_H

#include "clang/Basic/LLVM.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Value.h"
#include <cstring>

namespace clang {
namespace CodeGen {

class CodeGenFunction;

/// A branch fixup. These are required when emitting a goto to a
/// label which hasn't been emitted yet. The goto is optimistically
/// emitted as a branch to the basic block for the label, and (if it
/// occurs in a scope with non-trivial cleanups) a fixup is added to
/// the innermost cleanup. When a (normal) cleanup is popped, any
/// unresolved fixups in that scope are threaded through the cleanup.
struct BranchFixup {
  /// The block containing the terminator which needs to be modified
  /// into a switch if this fixup is resolved into the current scope.
  /// If null, LatestBranch points directly to the destination.
  llvm::BasicBlock *OptimisticBranchBlock;

  /// The ultimate destination of the branch.
  ///
  /// This can be set to null to indicate that this fixup was
  /// successfully resolved.
  llvm::BasicBlock *Destination;

  /// The destination index value.
  unsigned DestinationIndex;

  /// The initial branch of the fixup.
  llvm::BranchInst *InitialBranch;
};
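
// For example (illustrative, not part of this header), emitting
//
//   {
//     X x;        // X has a non-trivial destructor => normal cleanup
//     goto done;  // 'done' has not been emitted yet
//   }
//  done: ;
//
// emits the branch to 'done' optimistically and records a BranchFixup on the
// cleanup for 'x'; the fixup is threaded through the destructor call when
// that cleanup is popped.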

template <class T> struct InvariantValue {
  typedef T type;
  typedef T saved_type;
  static bool needsSaving(type value) { return false; }
  static saved_type save(CodeGenFunction &CGF, type value) { return value; }
  static type restore(CodeGenFunction &CGF, saved_type value) { return value; }
};

/// A metaprogramming class for ensuring that a value will dominate an
/// arbitrary position in a function.
template <class T> struct DominatingValue : InvariantValue<T> {};

template <class T, bool mightBeInstruction =
            llvm::is_base_of<llvm::Value, T>::value &&
            !llvm::is_base_of<llvm::Constant, T>::value &&
            !llvm::is_base_of<llvm::BasicBlock, T>::value>
struct DominatingPointer;
template <class T> struct DominatingPointer<T,false> : InvariantValue<T*> {};
// template <class T> struct DominatingPointer<T,true> at end of file

template <class T> struct DominatingValue<T*> : DominatingPointer<T> {};
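
// A minimal sketch of what a non-trivial specialization of this trait looks
// like, assuming a hypothetical SomeType that does need saving: values that
// are instructions cannot be assumed to dominate a later insertion point, so
// save() spills them (for example to an alloca) and restore() reloads them.
// SomeType and SomeSavedType are placeholder names, not part of this header.
//
//   template <> struct DominatingValue<SomeType> {
//     typedef SomeType type;
//     typedef SomeSavedType saved_type;
//     static bool needsSaving(type value);   // e.g. isa<llvm::Instruction>
//     static saved_type save(CodeGenFunction &CGF, type value);
//     static type restore(CodeGenFunction &CGF, saved_type value);
//   };
//
// The DominatingPointer<T,true> specialization mentioned above follows the
// same pattern.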

enum CleanupKind {
  EHCleanup = 0x1,
  NormalCleanup = 0x2,
  NormalAndEHCleanup = EHCleanup | NormalCleanup,

  InactiveCleanup = 0x4,
  InactiveEHCleanup = EHCleanup | InactiveCleanup,
  InactiveNormalCleanup = NormalCleanup | InactiveCleanup,
  InactiveNormalAndEHCleanup = NormalAndEHCleanup | InactiveCleanup
};

/// A stack of scopes which respond to exceptions, including cleanups
/// and catch blocks.
class EHScopeStack {
public:
  /// A saved depth on the scope stack. This is necessary because
  /// pushing scopes onto the stack invalidates iterators.
  class stable_iterator {
    friend class EHScopeStack;

    /// Offset from StartOfData to EndOfBuffer.
    ptrdiff_t Size;

    stable_iterator(ptrdiff_t Size) : Size(Size) {}

  public:
    static stable_iterator invalid() { return stable_iterator(-1); }
    stable_iterator() : Size(-1) {}

    bool isValid() const { return Size >= 0; }

    /// Returns true if this scope encloses I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool encloses(stable_iterator I) const { return Size <= I.Size; }

    /// Returns true if this scope strictly encloses I: that is,
    /// if it encloses I and is not I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }

    friend bool operator==(stable_iterator A, stable_iterator B) {
      return A.Size == B.Size;
    }
    friend bool operator!=(stable_iterator A, stable_iterator B) {
      return A.Size != B.Size;
    }
  };

  /// Information for lazily generating a cleanup. Subclasses must be
  /// POD-like: cleanups will not be destructed, and they will be
  /// allocated on the cleanup stack and freely copied and moved
  /// around.
  ///
  /// Cleanup implementations should generally be declared in an
  /// anonymous namespace.
  class Cleanup {
    // Anchor the construction vtable.
    virtual void anchor();
  public:
    /// Generation flags.
    class Flags {
      enum {
        F_IsForEH = 0x1,
        F_IsNormalCleanupKind = 0x2,
        F_IsEHCleanupKind = 0x4
      };
      unsigned flags;

    public:
      Flags() : flags(0) {}

      /// isForEH - true if the current emission is for an EH cleanup.
      bool isForEHCleanup() const { return flags & F_IsForEH; }
      bool isForNormalCleanup() const { return !isForEHCleanup(); }
      void setIsForEHCleanup() { flags |= F_IsForEH; }

      bool isNormalCleanupKind() const { return flags & F_IsNormalCleanupKind; }
      void setIsNormalCleanupKind() { flags |= F_IsNormalCleanupKind; }

      /// isEHCleanupKind - true if the cleanup was pushed as an EH
      /// cleanup.
      bool isEHCleanupKind() const { return flags & F_IsEHCleanupKind; }
      void setIsEHCleanupKind() { flags |= F_IsEHCleanupKind; }
    };

    // Provide a virtual destructor to suppress a very common warning
    // that unfortunately cannot be suppressed without this. Cleanups
    // should not rely on this destructor ever being called.
    virtual ~Cleanup() {}

    /// Emit the cleanup. For normal cleanups, this is run in the
    /// same EH context as when the cleanup was pushed, i.e. the
    /// immediately-enclosing context of the cleanup scope. For
    /// EH cleanups, this is run in a terminate context.
    ///
    /// \param flags cleanup kind.
    virtual void Emit(CodeGenFunction &CGF, Flags flags) = 0;
  };
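
  // A minimal sketch of how a concrete cleanup is typically declared and
  // pushed; the CallFree class and the ptr value are hypothetical, not part
  // of this header.
  //
  //   namespace {
  //     struct CallFree : EHScopeStack::Cleanup {
  //       llvm::Value *Ptr;
  //       CallFree(llvm::Value *ptr) : Ptr(ptr) {}
  //       void Emit(CodeGenFunction &CGF, Flags flags) {
  //         // emit the IR that releases Ptr here
  //       }
  //     };
  //   }
  //
  //   // ... later, while emitting the owning scope:
  //   CGF.EHStack.pushCleanup<CallFree>(NormalAndEHCleanup, ptr);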

  /// ConditionalCleanupN stores the saved form of its N parameters,
  /// then restores them and performs the cleanup.
  template <class T, class A0>
  class ConditionalCleanup1 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    A0_saved a0_saved;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      T(a0).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup1(A0_saved a0)
      : a0_saved(a0) {}
  };

  template <class T, class A0, class A1>
  class ConditionalCleanup2 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    typedef typename DominatingValue<A1>::saved_type A1_saved;
    A0_saved a0_saved;
    A1_saved a1_saved;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
      T(a0, a1).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup2(A0_saved a0, A1_saved a1)
      : a0_saved(a0), a1_saved(a1) {}
  };

  template <class T, class A0, class A1, class A2>
  class ConditionalCleanup3 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    typedef typename DominatingValue<A1>::saved_type A1_saved;
    typedef typename DominatingValue<A2>::saved_type A2_saved;
    A0_saved a0_saved;
    A1_saved a1_saved;
    A2_saved a2_saved;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
      A2 a2 = DominatingValue<A2>::restore(CGF, a2_saved);
      T(a0, a1, a2).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup3(A0_saved a0, A1_saved a1, A2_saved a2)
      : a0_saved(a0), a1_saved(a1), a2_saved(a2) {}
  };

  template <class T, class A0, class A1, class A2, class A3>
  class ConditionalCleanup4 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    typedef typename DominatingValue<A1>::saved_type A1_saved;
    typedef typename DominatingValue<A2>::saved_type A2_saved;
    typedef typename DominatingValue<A3>::saved_type A3_saved;
    A0_saved a0_saved;
    A1_saved a1_saved;
    A2_saved a2_saved;
    A3_saved a3_saved;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
      A2 a2 = DominatingValue<A2>::restore(CGF, a2_saved);
      A3 a3 = DominatingValue<A3>::restore(CGF, a3_saved);
      T(a0, a1, a2, a3).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup4(A0_saved a0, A1_saved a1, A2_saved a2, A3_saved a3)
      : a0_saved(a0), a1_saved(a1), a2_saved(a2), a3_saved(a3) {}
  };

private:
  // The implementation for this class is in CGException.h and
  // CGException.cpp; the definition is here because it's used as a
  // member of CodeGenFunction.

  /// The start of the scope-stack buffer, i.e. the allocated pointer
  /// for the buffer. All of these pointers are either simultaneously
  /// null or simultaneously valid.
  char *StartOfBuffer;

  /// The end of the buffer.
  char *EndOfBuffer;

  /// The first valid entry in the buffer.
  char *StartOfData;

  /// The innermost normal cleanup on the stack.
  stable_iterator InnermostNormalCleanup;

  /// The innermost EH scope on the stack.
  stable_iterator InnermostEHScope;

  /// The current set of branch fixups. A branch fixup is a jump to
  /// an as-yet unemitted label, i.e. a label for which we don't yet
  /// know the EH stack depth. Whenever we pop a cleanup, we have
  /// to thread all the current branch fixups through it.
  ///
  /// Fixups are recorded as the Use of the respective branch or
  /// switch statement. The use points to the final destination.
  /// When popping out of a cleanup, these uses are threaded through
  /// the cleanup and adjusted to point to the new cleanup.
  ///
  /// Note that branches are allowed to jump into protected scopes
  /// in certain situations; e.g. the following code is legal:
  ///   struct A { ~A(); }; // trivial ctor, non-trivial dtor
  ///   goto foo;
  ///   A a;
  ///  foo:
  ///   bar();
  SmallVector<BranchFixup, 8> BranchFixups;

  char *allocate(size_t Size);

  void *pushCleanup(CleanupKind K, size_t DataSize);

public:
  EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
                   InnermostNormalCleanup(stable_end()),
                   InnermostEHScope(stable_end()) {}
  ~EHScopeStack() { delete[] StartOfBuffer; }

  // Variadic templates would make this not terrible.

  /// Push a lazily-created cleanup on the stack.
  template <class T>
  void pushCleanup(CleanupKind Kind) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T();
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0>
  void pushCleanup(CleanupKind Kind, A0 a0) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(a0);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(a0, a1);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(a0, a1, a2);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2, class A3>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(a0, a1, a2, a3);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2, class A3, class A4>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(a0, a1, a2, a3, a4);
    (void) Obj;
  }

  // Feel free to add more variants of the following:

  /// Push a cleanup with non-constant storage requirements on the
  /// stack. The cleanup type must provide an additional static method:
  ///   static size_t getExtraSize(size_t);
  /// The argument to this method will be the value N, which will also
  /// be passed as the first argument to the constructor.
  ///
  /// The data stored in the extra storage must obey the same
  /// restrictions as normal cleanup member data.
  ///
  /// The pointer returned from this method is valid until the cleanup
  /// stack is modified.
  template <class T, class A0, class A1, class A2>
  T *pushCleanupWithExtra(CleanupKind Kind, size_t N, A0 a0, A1 a1, A2 a2) {
    void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
    return new (Buffer) T(N, a0, a1, a2);
  }
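
  // A rough sketch of a cleanup that uses the extra-storage protocol above;
  // the DestroyValues class and its members are hypothetical, not part of
  // this header. It keeps N values in its trailing storage, so getExtraSize
  // reports that many extra bytes and the constructor receives N first.
  //
  //   struct DestroyValues : EHScopeStack::Cleanup {
  //     unsigned Count;
  //     DestroyValues(size_t count, llvm::Value *a, llvm::Value *b,
  //                   bool flag) : Count(count) { /* ... */ }
  //     llvm::Value **getStorage() {
  //       return reinterpret_cast<llvm::Value **>(this + 1);
  //     }
  //     static size_t getExtraSize(size_t count) {
  //       return count * sizeof(llvm::Value *);
  //     }
  //     void Emit(CodeGenFunction &CGF, Flags flags) { /* ... */ }
  //   };
  //
  //   DestroyValues *cleanup =
  //     EHStack.pushCleanupWithExtra<DestroyValues>(NormalCleanup, n, a, b,
  //                                                 flag);
  //   // Fill cleanup->getStorage()[0..n) before the stack is modified again.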

  void pushCopyOfCleanup(CleanupKind Kind, const void *Cleanup, size_t Size) {
    void *Buffer = pushCleanup(Kind, Size);
    std::memcpy(Buffer, Cleanup, Size);
  }

  /// Pops a cleanup scope off the stack. This is private to CGCleanup.cpp.
  void popCleanup();

  /// Push a set of catch handlers on the stack. The catch is
  /// uninitialized and will need to have the given number of handlers
  /// set on it.
  class EHCatchScope *pushCatch(unsigned NumHandlers);

  /// Pops a catch scope off the stack. This is private to CGException.cpp.
  void popCatch();

  /// Push an exceptions filter on the stack.
  class EHFilterScope *pushFilter(unsigned NumFilters);

  /// Pops an exceptions filter off the stack.
  void popFilter();

  /// Push a terminate handler on the stack.
  void pushTerminate();

  /// Pops a terminate handler off the stack.
  void popTerminate();

  /// Determines whether the exception-scopes stack is empty.
  bool empty() const { return StartOfData == EndOfBuffer; }

  bool requiresLandingPad() const {
    return InnermostEHScope != stable_end();
  }

  /// Determines whether there are any normal cleanups on the stack.
  bool hasNormalCleanups() const {
    return InnermostNormalCleanup != stable_end();
  }

  /// Returns the innermost normal cleanup on the stack, or
  /// stable_end() if there are no normal cleanups.
  stable_iterator getInnermostNormalCleanup() const {
    return InnermostNormalCleanup;
  }
  stable_iterator getInnermostActiveNormalCleanup() const;

  stable_iterator getInnermostEHScope() const {
    return InnermostEHScope;
  }

  stable_iterator getInnermostActiveEHScope() const;

  /// An unstable reference to a scope-stack depth. Invalidated by
  /// pushes but not pops.
  class iterator;

  /// Returns an iterator pointing to the innermost EH scope.
  iterator begin() const;

  /// Returns an iterator pointing to the outermost EH scope.
  iterator end() const;

  /// Create a stable reference to the top of the EH stack. The
  /// returned reference is valid until that scope is popped off the
  /// stack.
  stable_iterator stable_begin() const {
    return stable_iterator(EndOfBuffer - StartOfData);
  }

  /// Create a stable reference to the bottom of the EH stack.
  static stable_iterator stable_end() {
    return stable_iterator(0);
  }

  /// Translates an iterator into a stable_iterator.
  stable_iterator stabilize(iterator it) const;
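
  // Typical (illustrative) use of the stable references above: save the
  // current depth before pushing scopes and compare against it later. The
  // SomeCleanup class pushed here is hypothetical.
  //
  //   EHScopeStack::stable_iterator Depth = EHStack.stable_begin();
  //   EHStack.pushCleanup<SomeCleanup>(NormalCleanup, arg);
  //   // ... emit more of the scope ...
  //   assert(Depth.strictlyEncloses(EHStack.stable_begin()));
  //   // After everything pushed since Depth has been popped again:
  //   assert(Depth == EHStack.stable_begin());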

  /// Turn a stable reference to a scope depth into an unstable pointer
  /// to the EH stack.
  iterator find(stable_iterator save) const;

  /// Removes the cleanup pointed to by the given stable_iterator.
  void removeCleanup(stable_iterator save);

  /// Add a branch fixup to the current cleanup scope.
  BranchFixup &addBranchFixup() {
    assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
    BranchFixups.push_back(BranchFixup());
    return BranchFixups.back();
  }

  unsigned getNumBranchFixups() const { return BranchFixups.size(); }
  BranchFixup &getBranchFixup(unsigned I) {
    assert(I < getNumBranchFixups());
    return BranchFixups[I];
  }

  /// Pops lazily-removed fixups from the end of the list. This
  /// should only be called by procedures which have just popped a
  /// cleanup or resolved one or more fixups.
  void popNullFixups();

  /// Clears the branch-fixups list. This should only be called by
  /// ResolveAllBranchFixups.
  void clearFixups() { BranchFixups.clear(); }
};

/// A protected scope for zero-cost EH handling.
class EHScope {
  llvm::BasicBlock *CachedLandingPad;
  llvm::BasicBlock *CachedEHDispatchBlock;

  EHScopeStack::stable_iterator EnclosingEHScope;

  class CommonBitFields {
    friend class EHScope;
    unsigned Kind : 2;
  };
  enum { NumCommonBits = 2 };

protected:
  class CatchBitFields {
    friend class EHCatchScope;
    unsigned : NumCommonBits;

    unsigned NumHandlers : 32 - NumCommonBits;
  };

  class CleanupBitFields {
    friend class EHCleanupScope;
    unsigned : NumCommonBits;

    /// Whether this cleanup needs to be run along normal edges.
    unsigned IsNormalCleanup : 1;

    /// Whether this cleanup needs to be run along exception edges.
    unsigned IsEHCleanup : 1;

    /// Whether this cleanup is currently active.
    unsigned IsActive : 1;

    /// Whether the normal cleanup should test the activation flag.
    unsigned TestFlagInNormalCleanup : 1;

    /// Whether the EH cleanup should test the activation flag.
    unsigned TestFlagInEHCleanup : 1;

    /// The amount of extra storage needed by the Cleanup.
    /// Always a multiple of the scope-stack alignment.
    unsigned CleanupSize : 12;

    /// The number of fixups required by enclosing scopes (not including
    /// this one). If this is the top cleanup scope, all the fixups
    /// from this index onwards belong to this scope.
    unsigned FixupDepth : 32 - 17 - NumCommonBits; // currently 13
  };

  class FilterBitFields {
    friend class EHFilterScope;
    unsigned : NumCommonBits;

    unsigned NumFilters : 32 - NumCommonBits;
  };

  union {
    CommonBitFields CommonBits;
    CatchBitFields CatchBits;
    CleanupBitFields CleanupBits;
    FilterBitFields FilterBits;
  };

public:
  enum Kind { Cleanup, Catch, Terminate, Filter };

  EHScope(Kind kind, EHScopeStack::stable_iterator enclosingEHScope)
    : CachedLandingPad(0), CachedEHDispatchBlock(0),
      EnclosingEHScope(enclosingEHScope) {
    CommonBits.Kind = kind;
  }

  Kind getKind() const { return static_cast<Kind>(CommonBits.Kind); }

  llvm::BasicBlock *getCachedLandingPad() const {
    return CachedLandingPad;
  }

  void setCachedLandingPad(llvm::BasicBlock *block) {
    CachedLandingPad = block;
  }

  llvm::BasicBlock *getCachedEHDispatchBlock() const {
    return CachedEHDispatchBlock;
  }

  void setCachedEHDispatchBlock(llvm::BasicBlock *block) {
    CachedEHDispatchBlock = block;
  }

  bool hasEHBranches() const {
    if (llvm::BasicBlock *block = getCachedEHDispatchBlock())
      return !block->use_empty();
    return false;
  }

  EHScopeStack::stable_iterator getEnclosingEHScope() const {
    return EnclosingEHScope;
  }
};

/// A scope which attempts to handle some, possibly all, types of
/// exceptions.
///
/// Objective C \@finally blocks are represented using a cleanup scope
/// after the catch scope.
class EHCatchScope : public EHScope {
  // In effect, we have a flexible array member
  //   Handler Handlers[0];
  // But that's only standard in C99, not C++, so we have to do
  // annoying pointer arithmetic instead.

public:
  struct Handler {
    /// A type info value, or null (C++ null, not an LLVM null pointer)
    /// for a catch-all.
    llvm::Value *Type;

    /// The catch handler for this type.
    llvm::BasicBlock *Block;

    bool isCatchAll() const { return Type == 0; }
  };

private:
  friend class EHScopeStack;

  Handler *getHandlers() {
    return reinterpret_cast<Handler*>(this+1);
  }

  const Handler *getHandlers() const {
    return reinterpret_cast<const Handler*>(this+1);
  }

public:
  static size_t getSizeForNumHandlers(unsigned N) {
    return sizeof(EHCatchScope) + N * sizeof(Handler);
  }

  EHCatchScope(unsigned numHandlers,
               EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Catch, enclosingEHScope) {
    CatchBits.NumHandlers = numHandlers;
  }

  unsigned getNumHandlers() const {
    return CatchBits.NumHandlers;
  }

  void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
    setHandler(I, /*catchall*/ 0, Block);
  }

  void setHandler(unsigned I, llvm::Value *Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = Type;
    getHandlers()[I].Block = Block;
  }

  const Handler &getHandler(unsigned I) const {
    assert(I < getNumHandlers());
    return getHandlers()[I];
  }

  typedef const Handler *iterator;
  iterator begin() const { return getHandlers(); }
  iterator end() const { return getHandlers() + getNumHandlers(); }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Catch;
  }
};

/// A cleanup scope which generates the cleanup blocks lazily.
class EHCleanupScope : public EHScope {
  /// The nearest normal cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingNormal;

  /// The nearest EH scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingEH;

  /// The dual entry/exit block along the normal edge. This is lazily
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *NormalBlock;

  /// An optional i1 variable indicating whether this cleanup has been
  /// activated yet.
  llvm::AllocaInst *ActiveFlag;

  /// Extra information required for cleanups that have resolved
  /// branches through them. This has to be allocated on the side
  /// because everything on the cleanup stack has to be trivially
  /// movable.
  struct ExtInfo {
    /// The destinations of normal branch-afters and branch-throughs.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;

    /// Normal branch-afters.
    SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      BranchAfters;
  };
  mutable struct ExtInfo *ExtInfo;

  struct ExtInfo &getExtInfo() {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

  const struct ExtInfo &getExtInfo() const {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

public:
  /// Gets the size required for a lazy cleanup scope with the given
  /// cleanup-data requirements.
  static size_t getSizeForCleanupSize(size_t Size) {
    return sizeof(EHCleanupScope) + Size;
  }

  size_t getAllocatedSize() const {
    return sizeof(EHCleanupScope) + CleanupBits.CleanupSize;
  }

  EHCleanupScope(bool isNormal, bool isEH, bool isActive,
                 unsigned cleanupSize, unsigned fixupDepth,
                 EHScopeStack::stable_iterator enclosingNormal,
                 EHScopeStack::stable_iterator enclosingEH)
    : EHScope(EHScope::Cleanup, enclosingEH),
      EnclosingNormal(enclosingNormal),
      NormalBlock(0), ActiveFlag(0), ExtInfo(0) {
    CleanupBits.IsNormalCleanup = isNormal;
    CleanupBits.IsEHCleanup = isEH;
    CleanupBits.IsActive = isActive;
    CleanupBits.TestFlagInNormalCleanup = false;
    CleanupBits.TestFlagInEHCleanup = false;
    CleanupBits.CleanupSize = cleanupSize;
    CleanupBits.FixupDepth = fixupDepth;

    assert(CleanupBits.CleanupSize == cleanupSize && "cleanup size overflow");
  }

  ~EHCleanupScope() {
    delete ExtInfo;
  }

  bool isNormalCleanup() const { return CleanupBits.IsNormalCleanup; }
  llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
  void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }

  bool isEHCleanup() const { return CleanupBits.IsEHCleanup; }
  llvm::BasicBlock *getEHBlock() const { return getCachedEHDispatchBlock(); }
  void setEHBlock(llvm::BasicBlock *BB) { setCachedEHDispatchBlock(BB); }

  bool isActive() const { return CleanupBits.IsActive; }
  void setActive(bool A) { CleanupBits.IsActive = A; }

  llvm::AllocaInst *getActiveFlag() const { return ActiveFlag; }
  void setActiveFlag(llvm::AllocaInst *Var) { ActiveFlag = Var; }

  void setTestFlagInNormalCleanup() {
    CleanupBits.TestFlagInNormalCleanup = true;
  }
  bool shouldTestFlagInNormalCleanup() const {
    return CleanupBits.TestFlagInNormalCleanup;
  }

  void setTestFlagInEHCleanup() {
    CleanupBits.TestFlagInEHCleanup = true;
  }
  bool shouldTestFlagInEHCleanup() const {
    return CleanupBits.TestFlagInEHCleanup;
  }

  unsigned getFixupDepth() const { return CleanupBits.FixupDepth; }
  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
    return EnclosingNormal;
  }

  size_t getCleanupSize() const { return CleanupBits.CleanupSize; }
  void *getCleanupBuffer() { return this + 1; }

  EHScopeStack::Cleanup *getCleanup() {
    return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
  }

  /// True if this cleanup scope has any branch-afters or branch-throughs.
  bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); }

  /// Add a branch-after to this cleanup scope. A branch-after is a
  /// branch from a point protected by this (normal) cleanup to a
  /// point in the normal cleanup scope immediately containing it.
  /// For example,
  ///   for (;;) { A a; break; }
  /// contains a branch-after.
  ///
  /// Branch-afters each have their own destination out of the
  /// cleanup, guaranteed distinct from anything else threaded through
  /// it. Therefore branch-afters usually force a switch after the
  /// cleanup.
  void addBranchAfter(llvm::ConstantInt *Index,
                      llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.Branches.insert(Block))
      ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index));
  }

  /// Return the number of unique branch-afters on this scope.
  unsigned getNumBranchAfters() const {
    return ExtInfo ? ExtInfo->BranchAfters.size() : 0;
  }

  llvm::BasicBlock *getBranchAfterBlock(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].first;
  }

  llvm::ConstantInt *getBranchAfterIndex(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].second;
  }

  /// Add a branch-through to this cleanup scope. A branch-through is
  /// a branch from a scope protected by this (normal) cleanup to an
  /// enclosing scope other than the immediately-enclosing normal
  /// cleanup scope.
  ///
  /// In the following example, the branch through B's scope is a
  /// branch-through, while the branch through A's scope is a
  /// branch-after:
  ///   for (;;) { A a; B b; break; }
  ///
  /// All branch-throughs have a common destination out of the
  /// cleanup, one possibly shared with the fall-through. Therefore
  /// branch-throughs usually don't force a switch after the cleanup.
  ///
  /// \return true if the branch-through was new to this scope
  bool addBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().Branches.insert(Block);
  }

  /// Determines if this cleanup scope has any branch throughs.
  bool hasBranchThroughs() const {
    if (!ExtInfo) return false;
    return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size());
  }

  static bool classof(const EHScope *Scope) {
    return (Scope->getKind() == Cleanup);
  }
};

/// An exceptions scope which filters exceptions thrown through it.
/// Only exceptions matching the filter types will be permitted to be
/// thrown.
///
/// This is used to implement C++ exception specifications.
class EHFilterScope : public EHScope {
  // Essentially ends in a flexible array member:
  //   llvm::Value *FilterTypes[0];

  llvm::Value **getFilters() {
    return reinterpret_cast<llvm::Value**>(this+1);
  }

  llvm::Value * const *getFilters() const {
    return reinterpret_cast<llvm::Value* const *>(this+1);
  }

public:
  EHFilterScope(unsigned numFilters)
    : EHScope(Filter, EHScopeStack::stable_end()) {
    FilterBits.NumFilters = numFilters;
  }

  static size_t getSizeForNumFilters(unsigned numFilters) {
    return sizeof(EHFilterScope) + numFilters * sizeof(llvm::Value*);
  }

  unsigned getNumFilters() const { return FilterBits.NumFilters; }

  void setFilter(unsigned i, llvm::Value *filterValue) {
    assert(i < getNumFilters());
    getFilters()[i] = filterValue;
  }

  llvm::Value *getFilter(unsigned i) const {
    assert(i < getNumFilters());
    return getFilters()[i];
  }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Filter;
  }
};

/// An exceptions scope which calls std::terminate if any exception
/// reaches it.
class EHTerminateScope : public EHScope {
public:
  EHTerminateScope(EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Terminate, enclosingEHScope) {}
  static size_t getSize() { return sizeof(EHTerminateScope); }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Terminate;
  }
};

/// A non-stable pointer into the scope stack.
class EHScopeStack::iterator {
  char *Ptr;

  friend class EHScopeStack;
  explicit iterator(char *Ptr) : Ptr(Ptr) {}

public:
  iterator() : Ptr(0) {}

  EHScope *get() const {
    return reinterpret_cast<EHScope*>(Ptr);
  }

  EHScope *operator->() const { return get(); }
  EHScope &operator*() const { return *get(); }

  iterator &operator++() {
    switch (get()->getKind()) {
    case EHScope::Catch:
      Ptr += EHCatchScope::getSizeForNumHandlers(
          static_cast<const EHCatchScope*>(get())->getNumHandlers());
      break;

    case EHScope::Filter:
      Ptr += EHFilterScope::getSizeForNumFilters(
          static_cast<const EHFilterScope*>(get())->getNumFilters());
      break;

    case EHScope::Cleanup:
      Ptr += static_cast<const EHCleanupScope*>(get())->getAllocatedSize();
      break;

    case EHScope::Terminate:
      Ptr += EHTerminateScope::getSize();
      break;
    }

    return *this;
  }

  iterator next() {
    iterator copy = *this;
    ++copy;
    return copy;
  }

  iterator operator++(int) {
    iterator copy = *this;
    operator++();
    return copy;
  }

  bool encloses(iterator other) const { return Ptr >= other.Ptr; }
  bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }

  bool operator==(iterator other) const { return Ptr == other.Ptr; }
  bool operator!=(iterator other) const { return Ptr != other.Ptr; }
};

inline EHScopeStack::iterator EHScopeStack::begin() const {
  return iterator(StartOfData);
}

inline EHScopeStack::iterator EHScopeStack::end() const {
  return iterator(EndOfBuffer);
}
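
// Illustrative only: walking every EH scope from innermost to outermost with
// the unstable iterator defined above. EHStack is assumed to be the
// EHScopeStack being inspected (for example the one owned by the current
// CodeGenFunction).
//
//   for (EHScopeStack::iterator I = EHStack.begin(), E = EHStack.end();
//        I != E; ++I) {
//     switch (I->getKind()) {
//     case EHScope::Cleanup:   /* ... */ break;
//     case EHScope::Catch:     /* ... */ break;
//     case EHScope::Filter:    /* ... */ break;
//     case EHScope::Terminate: /* ... */ break;
//     }
//   }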

inline void EHScopeStack::popCatch() {
  assert(!empty() && "popping exception stack when not empty");

  EHCatchScope &scope = cast<EHCatchScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  StartOfData += EHCatchScope::getSizeForNumHandlers(scope.getNumHandlers());
}

inline void EHScopeStack::popTerminate() {
  assert(!empty() && "popping exception stack when not empty");

  EHTerminateScope &scope = cast<EHTerminateScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  StartOfData += EHTerminateScope::getSize();
}

inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
  assert(sp.isValid() && "finding invalid savepoint");
  assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
  return iterator(EndOfBuffer - sp.Size);
}

inline EHScopeStack::stable_iterator
EHScopeStack::stabilize(iterator ir) const {
  assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
  return stable_iterator(EndOfBuffer - ir.Ptr);
}

} // namespace CodeGen
} // namespace clang

#endif