| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 1 | //===-- EHScopeStack.h - Stack for cleanup IR generation --------*- C++ -*-===// |
| 2 | // |
| Chandler Carruth | 2946cd7 | 2019-01-19 08:50:56 +0000 | [diff] [blame] | 3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
| 4 | // See https://llvm.org/LICENSE.txt for license information. |
| 5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 6 | // |
| 7 | //===----------------------------------------------------------------------===// |
| 8 | // |
| 9 | // These classes should be the minimum interface required for other parts of |
| 10 | // CodeGen to emit cleanups. The implementation is in CGCleanup.cpp and other |
// implementation details that are not widely needed are in CGCleanup.h.
| 12 | // |
| 13 | //===----------------------------------------------------------------------===// |
| 14 | |
| Benjamin Kramer | 2f5db8b | 2014-08-13 16:25:19 +0000 | [diff] [blame] | 15 | #ifndef LLVM_CLANG_LIB_CODEGEN_EHSCOPESTACK_H |
| 16 | #define LLVM_CLANG_LIB_CODEGEN_EHSCOPESTACK_H |
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 17 | |
| 18 | #include "clang/Basic/LLVM.h" |
| Benjamin Kramer | 51680bc | 2015-03-12 23:41:40 +0000 | [diff] [blame] | 19 | #include "llvm/ADT/STLExtras.h" |
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 20 | #include "llvm/ADT/SmallVector.h" |
| 21 | #include "llvm/IR/BasicBlock.h" |
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 22 | #include "llvm/IR/Instructions.h" |
| Chandler Carruth | 5553d0d | 2014-01-07 11:51:46 +0000 | [diff] [blame] | 23 | #include "llvm/IR/Value.h" |
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 24 | |
| 25 | namespace clang { |
| 26 | namespace CodeGen { |
| 27 | |
| 28 | class CodeGenFunction; |
| 29 | |
/// A branch fixup.  These are required when emitting a goto to a
/// label which hasn't been emitted yet.  The goto is optimistically
/// emitted as a branch to the basic block for the label, and (if it
/// occurs in a scope with non-trivial cleanups) a fixup is added to
/// the innermost cleanup.  When a (normal) cleanup is popped, any
/// unresolved fixups in that scope are threaded through the cleanup.
struct BranchFixup {
  /// The block containing the terminator which needs to be modified
  /// into a switch if this fixup is resolved into the current scope.
  /// If null, InitialBranch points directly to the destination.
  llvm::BasicBlock *OptimisticBranchBlock;

  /// The ultimate destination of the branch.
  ///
  /// This can be set to null to indicate that this fixup was
  /// successfully resolved.
  llvm::BasicBlock *Destination;

  /// The destination index value.
  // NOTE(review): presumably an index into the cleanup destination
  // table used when threading through exit switches — confirm against
  // CGCleanup.cpp.
  unsigned DestinationIndex;

  /// The initial branch of the fixup, i.e. the branch instruction that
  /// was optimistically emitted for the goto.
  llvm::BranchInst *InitialBranch;
};
| 54 | |
/// A saving/restoring strategy for values that do not need to be saved
/// across a control-flow edge: "saving" and "restoring" are both the
/// identity operation.
template <class T> struct InvariantValue {
  typedef T type;
  typedef T saved_type;
  // Invariant values never need saving; the parameter is intentionally
  // unused.
  static bool needsSaving(type value) { return false; }
  // No-op: the value itself is its saved form.
  static saved_type save(CodeGenFunction &CGF, type value) { return value; }
  // No-op: the saved form is the value.
  static type restore(CodeGenFunction &CGF, saved_type value) { return value; }
};
| 62 | |
/// A metaprogramming class for ensuring that a value will dominate an
/// arbitrary position in a function.
///
/// By default, values are treated as invariant (no saving required);
/// specializations override this for values that must be spilled.
template <class T> struct DominatingValue : InvariantValue<T> {};

// A pointer only needs special handling when it might point at an
// llvm::Instruction: constants and basic blocks trivially dominate
// everything, so they use the invariant strategy.
template <class T, bool mightBeInstruction =
                       std::is_base_of<llvm::Value, T>::value &&
                       !std::is_base_of<llvm::Constant, T>::value &&
                       !std::is_base_of<llvm::BasicBlock, T>::value>
struct DominatingPointer;
template <class T> struct DominatingPointer<T,false> : InvariantValue<T*> {};
// template <class T> struct DominatingPointer<T,true> at end of file
// NOTE(review): the <T,true> specialization is not visible in this
// header chunk — confirm where it is actually defined.

template <class T> struct DominatingValue<T*> : DominatingPointer<T> {};
| 76 | |
/// Flags describing when and how a pushed cleanup should run.
/// Kinds may be combined (this is a bitmask).
enum CleanupKind : unsigned {
  /// Denotes a cleanup that should run when a scope is exited using
  /// exceptional control flow (a throw statement leading to stack
  /// unwinding).
  EHCleanup = 0x1,

  /// Denotes a cleanup that should run when a scope is exited using normal
  /// control flow (falling off the end of the scope, return, goto, ...).
  NormalCleanup = 0x2,

  /// Runs on both normal and exceptional exit.
  NormalAndEHCleanup = EHCleanup | NormalCleanup,

  /// NOTE(review): presumably marks a cleanup that is pushed in a
  /// deactivated state and must be activated later — confirm in
  /// CGCleanup.cpp.
  InactiveCleanup = 0x4,
  InactiveEHCleanup = EHCleanup | InactiveCleanup,
  InactiveNormalCleanup = NormalCleanup | InactiveCleanup,
  InactiveNormalAndEHCleanup = NormalAndEHCleanup | InactiveCleanup,

  /// NOTE(review): appears to tag cleanups that only emit lifetime
  /// markers (see containsOnlyLifetimeMarkers below) — confirm.
  LifetimeMarker = 0x8,
  NormalEHLifetimeMarker = LifetimeMarker | NormalAndEHCleanup,
};
| 96 | |
/// A stack of scopes which respond to exceptions, including cleanups
/// and catch blocks.
///
/// The stack owns a single raw character buffer; scopes are allocated
/// inside it and addressed by stable byte offsets from the end of the
/// buffer so that reallocation does not invalidate saved depths.
class EHScopeStack {
public:
  /* Should switch to alignof(uint64_t) instead of 8, when EHCleanupScope can */
  enum { ScopeStackAlignment = 8 };

  /// A saved depth on the scope stack.  This is necessary because
  /// pushing scopes onto the stack invalidates iterators.
  class stable_iterator {
    friend class EHScopeStack;

    /// Offset from StartOfData to EndOfBuffer.
    /// A larger Size denotes a more deeply nested (more recently
    /// pushed) position: stable_begin() yields the full current depth
    /// while stable_end() is always 0.
    ptrdiff_t Size;

    stable_iterator(ptrdiff_t Size) : Size(Size) {}

  public:
    /// A sentinel depth (-1) that isValid() reports as invalid.
    static stable_iterator invalid() { return stable_iterator(-1); }
    /// Default-constructed iterators are invalid.
    stable_iterator() : Size(-1) {}

    bool isValid() const { return Size >= 0; }

    /// Returns true if this scope encloses I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    /// Note the comparison is non-strict: a scope encloses itself.
    bool encloses(stable_iterator I) const { return Size <= I.Size; }

    /// Returns true if this scope strictly encloses I: that is,
    /// if it encloses I and is not I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }

    friend bool operator==(stable_iterator A, stable_iterator B) {
      return A.Size == B.Size;
    }
    friend bool operator!=(stable_iterator A, stable_iterator B) {
      return A.Size != B.Size;
    }
  };

  /// Information for lazily generating a cleanup.  Subclasses must be
  /// POD-like: cleanups will not be destructed, and they will be
  /// allocated on the cleanup stack and freely copied and moved
  /// around.
  ///
  /// Cleanup implementations should generally be declared in an
  /// anonymous namespace.
  class Cleanup {
    // Anchor the construction vtable.
    virtual void anchor();

  protected:
    // Protected, non-virtual: cleanups are never destroyed through a
    // Cleanup pointer (they are never destructed at all — see the
    // class comment).
    ~Cleanup() = default;

  public:
    Cleanup(const Cleanup &) = default;
    // Move construction is a no-op: Cleanup itself carries no state.
    Cleanup(Cleanup &&) {}
    Cleanup() = default;

    /// Generation flags passed to Emit().
    class Flags {
      enum {
        F_IsForEH = 0x1,
        F_IsNormalCleanupKind = 0x2,
        F_IsEHCleanupKind = 0x4,
        F_HasExitSwitch = 0x8,
      };
      unsigned flags;

    public:
      Flags() : flags(0) {}

      /// isForEH - true if the current emission is for an EH cleanup.
      bool isForEHCleanup() const { return flags & F_IsForEH; }
      bool isForNormalCleanup() const { return !isForEHCleanup(); }
      void setIsForEHCleanup() { flags |= F_IsForEH; }

      /// true if the cleanup was pushed as a normal cleanup.
      bool isNormalCleanupKind() const { return flags & F_IsNormalCleanupKind; }
      void setIsNormalCleanupKind() { flags |= F_IsNormalCleanupKind; }

      /// isEHCleanupKind - true if the cleanup was pushed as an EH
      /// cleanup.
      bool isEHCleanupKind() const { return flags & F_IsEHCleanupKind; }
      void setIsEHCleanupKind() { flags |= F_IsEHCleanupKind; }

      // NOTE(review): presumably set when scope exit is routed through
      // a switch (cf. BranchFixup::OptimisticBranchBlock) — confirm in
      // CGCleanup.cpp.
      bool hasExitSwitch() const { return flags & F_HasExitSwitch; }
      void setHasExitSwitch() { flags |= F_HasExitSwitch; }
    };

    /// Emit the cleanup.  For normal cleanups, this is run in the
    /// same EH context as when the cleanup was pushed, i.e. the
    /// immediately-enclosing context of the cleanup scope.  For
    /// EH cleanups, this is run in a terminate context.
    ///
    // \param flags cleanup kind.
    virtual void Emit(CodeGenFunction &CGF, Flags flags) = 0;
  };

  /// ConditionalCleanup stores the saved form of its parameters,
  /// then restores them and performs the cleanup.
  template <class T, class... As>
  class ConditionalCleanup final : public Cleanup {
    typedef std::tuple<typename DominatingValue<As>::saved_type...> SavedTuple;
    SavedTuple Saved;

    /// Reconstruct a T by restoring each saved operand; Is... is the
    /// index pack over the saved tuple.
    template <std::size_t... Is>
    T restore(CodeGenFunction &CGF, std::index_sequence<Is...>) {
      // It's important that the restores are emitted in order. The braced init
      // list guarantees that.
      return T{DominatingValue<As>::restore(CGF, std::get<Is>(Saved))...};
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      restore(CGF, std::index_sequence_for<As...>()).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup(typename DominatingValue<As>::saved_type... A)
        : Saved(A...) {}

    ConditionalCleanup(SavedTuple Tuple) : Saved(std::move(Tuple)) {}
  };

private:
  // The implementation for this class is in CGException.h and
  // CGException.cpp; the definition is here because it's used as a
  // member of CodeGenFunction.

  /// The start of the scope-stack buffer, i.e. the allocated pointer
  /// for the buffer.  All of these pointers are either simultaneously
  /// null or simultaneously valid.
  char *StartOfBuffer;

  /// The end of the buffer.
  char *EndOfBuffer;

  /// The first valid entry in the buffer.
  char *StartOfData;

  /// The innermost normal cleanup on the stack.
  stable_iterator InnermostNormalCleanup;

  /// The innermost EH scope on the stack.
  stable_iterator InnermostEHScope;

  /// The current set of branch fixups.  A branch fixup is a jump to
  /// an as-yet unemitted label, i.e. a label for which we don't yet
  /// know the EH stack depth.  Whenever we pop a cleanup, we have
  /// to thread all the current branch fixups through it.
  ///
  /// Fixups are recorded as the Use of the respective branch or
  /// switch statement.  The use points to the final destination.
  /// When popping out of a cleanup, these uses are threaded through
  /// the cleanup and adjusted to point to the new cleanup.
  ///
  /// Note that branches are allowed to jump into protected scopes
  /// in certain situations; e.g. the following code is legal:
  ///     struct A { ~A(); }; // trivial ctor, non-trivial dtor
  ///     goto foo;
  ///     A a;
  ///    foo:
  ///     bar();
  SmallVector<BranchFixup, 8> BranchFixups;

  // Raw buffer management; implemented in CGCleanup.cpp.
  char *allocate(size_t Size);
  void deallocate(size_t Size);

  // Allocates space for a scope of the given kind/size and returns the
  // storage; the public template overload placement-news into it.
  void *pushCleanup(CleanupKind K, size_t DataSize);

public:
  EHScopeStack() : StartOfBuffer(nullptr), EndOfBuffer(nullptr),
                   StartOfData(nullptr), InnermostNormalCleanup(stable_end()),
                   InnermostEHScope(stable_end()) {}
  ~EHScopeStack() { delete[] StartOfBuffer; }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class... As> void pushCleanup(CleanupKind Kind, As... A) {
    static_assert(alignof(T) <= ScopeStackAlignment,
                  "Cleanup's alignment is too large.");
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(A...);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack. Tuple version.
  template <class T, class... As>
  void pushCleanupTuple(CleanupKind Kind, std::tuple<As...> A) {
    static_assert(alignof(T) <= ScopeStackAlignment,
                  "Cleanup's alignment is too large.");
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(std::move(A));
    (void) Obj;
  }

  // Feel free to add more variants of the following:

  /// Push a cleanup with non-constant storage requirements on the
  /// stack.  The cleanup type must provide an additional static method:
  ///   static size_t getExtraSize(size_t);
  /// The argument to this method will be the value N, which will also
  /// be passed as the first argument to the constructor.
  ///
  /// The data stored in the extra storage must obey the same
  /// restrictions as normal cleanup member data.
  ///
  /// The pointer returned from this method is valid until the cleanup
  /// stack is modified.
  template <class T, class... As>
  T *pushCleanupWithExtra(CleanupKind Kind, size_t N, As... A) {
    static_assert(alignof(T) <= ScopeStackAlignment,
                  "Cleanup's alignment is too large.");
    void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
    return new (Buffer) T(N, A...);
  }

  /// Push a byte-for-byte copy of an existing cleanup object; valid
  /// because cleanups are required to be POD-like (see Cleanup).
  void pushCopyOfCleanup(CleanupKind Kind, const void *Cleanup, size_t Size) {
    void *Buffer = pushCleanup(Kind, Size);
    std::memcpy(Buffer, Cleanup, Size);
  }

  /// Pops a cleanup scope off the stack.  This is private to CGCleanup.cpp.
  void popCleanup();

  /// Push a set of catch handlers on the stack.  The catch is
  /// uninitialized and will need to have the given number of handlers
  /// set on it.
  class EHCatchScope *pushCatch(unsigned NumHandlers);

  /// Pops a catch scope off the stack.  This is private to CGException.cpp.
  void popCatch();

  /// Push an exceptions filter on the stack.
  class EHFilterScope *pushFilter(unsigned NumFilters);

  /// Pops an exceptions filter off the stack.
  void popFilter();

  /// Push a terminate handler on the stack.
  void pushTerminate();

  /// Pops a terminate handler off the stack.
  void popTerminate();

  // Returns true iff the current scope is either empty or contains only
  // lifetime markers, i.e. no real cleanup code
  bool containsOnlyLifetimeMarkers(stable_iterator Old) const;

  /// Determines whether the exception-scopes stack is empty.
  bool empty() const { return StartOfData == EndOfBuffer; }

  /// Returns true if any scope on the stack requires an EH landing pad.
  bool requiresLandingPad() const;

  /// Determines whether there are any normal cleanups on the stack.
  bool hasNormalCleanups() const {
    return InnermostNormalCleanup != stable_end();
  }

  /// Returns the innermost normal cleanup on the stack, or
  /// stable_end() if there are no normal cleanups.
  stable_iterator getInnermostNormalCleanup() const {
    return InnermostNormalCleanup;
  }
  stable_iterator getInnermostActiveNormalCleanup() const;

  /// Returns the innermost EH scope on the stack, or stable_end().
  stable_iterator getInnermostEHScope() const {
    return InnermostEHScope;
  }


  /// An unstable reference to a scope-stack depth.  Invalidated by
  /// pushes but not pops.
  class iterator;

  /// Returns an iterator pointing to the innermost EH scope.
  iterator begin() const;

  /// Returns an iterator pointing to the outermost EH scope.
  iterator end() const;

  /// Create a stable reference to the top of the EH stack.  The
  /// returned reference is valid until that scope is popped off the
  /// stack.
  stable_iterator stable_begin() const {
    return stable_iterator(EndOfBuffer - StartOfData);
  }

  /// Create a stable reference to the bottom of the EH stack.
  static stable_iterator stable_end() {
    return stable_iterator(0);
  }

  /// Translates an iterator into a stable_iterator.
  stable_iterator stabilize(iterator it) const;

  /// Turn a stable reference to a scope depth into a unstable pointer
  /// to the EH stack.
  iterator find(stable_iterator save) const;

  /// Add a branch fixup to the current cleanup scope.
  BranchFixup &addBranchFixup() {
    assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
    BranchFixups.push_back(BranchFixup());
    return BranchFixups.back();
  }

  unsigned getNumBranchFixups() const { return BranchFixups.size(); }
  BranchFixup &getBranchFixup(unsigned I) {
    assert(I < getNumBranchFixups());
    return BranchFixups[I];
  }

  /// Pops lazily-removed fixups from the end of the list.  This
  /// should only be called by procedures which have just popped a
  /// cleanup or resolved one or more fixups.
  void popNullFixups();

  /// Clears the branch-fixups list.  This should only be called by
  /// ResolveAllBranchFixups.
  void clearFixups() { BranchFixups.clear(); }
};
| 419 | |
| 420 | } // namespace CodeGen |
| 421 | } // namespace clang |
| 422 | |
| 423 | #endif |