| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 1 | //===-- EHScopeStack.h - Stack for cleanup IR generation --------*- C++ -*-===// | 
|  | 2 | // | 
| Chandler Carruth | 2946cd7 | 2019-01-19 08:50:56 +0000 | [diff] [blame] | 3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. | 
|  | 4 | // See https://llvm.org/LICENSE.txt for license information. | 
|  | 5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 6 | // | 
|  | 7 | //===----------------------------------------------------------------------===// | 
|  | 8 | // | 
|  | 9 | // These classes should be the minimum interface required for other parts of | 
// CodeGen to emit cleanups.  The implementation is in CGCleanup.cpp; other
// implementation details that are not widely needed are in CGCleanup.h.
|  | 12 | // | 
|  | 13 | //===----------------------------------------------------------------------===// | 
|  | 14 |  | 
| Benjamin Kramer | 2f5db8b | 2014-08-13 16:25:19 +0000 | [diff] [blame] | 15 | #ifndef LLVM_CLANG_LIB_CODEGEN_EHSCOPESTACK_H | 
|  | 16 | #define LLVM_CLANG_LIB_CODEGEN_EHSCOPESTACK_H | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 17 |  | 
|  | 18 | #include "clang/Basic/LLVM.h" | 
| Benjamin Kramer | 51680bc | 2015-03-12 23:41:40 +0000 | [diff] [blame] | 19 | #include "llvm/ADT/STLExtras.h" | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 20 | #include "llvm/ADT/SmallVector.h" | 
|  | 21 | #include "llvm/IR/BasicBlock.h" | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 22 | #include "llvm/IR/Instructions.h" | 
| Chandler Carruth | 5553d0d | 2014-01-07 11:51:46 +0000 | [diff] [blame] | 23 | #include "llvm/IR/Value.h" | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 24 |  | 
|  | 25 | namespace clang { | 
|  | 26 | namespace CodeGen { | 
|  | 27 |  | 
|  | 28 | class CodeGenFunction; | 
|  | 29 |  | 
|  | 30 | /// A branch fixup.  These are required when emitting a goto to a | 
|  | 31 | /// label which hasn't been emitted yet.  The goto is optimistically | 
|  | 32 | /// emitted as a branch to the basic block for the label, and (if it | 
|  | 33 | /// occurs in a scope with non-trivial cleanups) a fixup is added to | 
|  | 34 | /// the innermost cleanup.  When a (normal) cleanup is popped, any | 
|  | 35 | /// unresolved fixups in that scope are threaded through the cleanup. | 
|  | 36 | struct BranchFixup { | 
|  | 37 | /// The block containing the terminator which needs to be modified | 
|  | 38 | /// into a switch if this fixup is resolved into the current scope. | 
|  | 39 | /// If null, LatestBranch points directly to the destination. | 
|  | 40 | llvm::BasicBlock *OptimisticBranchBlock; | 
|  | 41 |  | 
|  | 42 | /// The ultimate destination of the branch. | 
|  | 43 | /// | 
|  | 44 | /// This can be set to null to indicate that this fixup was | 
|  | 45 | /// successfully resolved. | 
|  | 46 | llvm::BasicBlock *Destination; | 
|  | 47 |  | 
|  | 48 | /// The destination index value. | 
|  | 49 | unsigned DestinationIndex; | 
|  | 50 |  | 
|  | 51 | /// The initial branch of the fixup. | 
|  | 52 | llvm::BranchInst *InitialBranch; | 
|  | 53 | }; | 
|  | 54 |  | 
|  | 55 | template <class T> struct InvariantValue { | 
|  | 56 | typedef T type; | 
|  | 57 | typedef T saved_type; | 
|  | 58 | static bool needsSaving(type value) { return false; } | 
|  | 59 | static saved_type save(CodeGenFunction &CGF, type value) { return value; } | 
|  | 60 | static type restore(CodeGenFunction &CGF, saved_type value) { return value; } | 
|  | 61 | }; | 
|  | 62 |  | 
|  | 63 | /// A metaprogramming class for ensuring that a value will dominate an | 
|  | 64 | /// arbitrary position in a function. | 
|  | 65 | template <class T> struct DominatingValue : InvariantValue<T> {}; | 
|  | 66 |  | 
|  | 67 | template <class T, bool mightBeInstruction = | 
| Benjamin Kramer | ed2f476 | 2014-03-07 14:30:23 +0000 | [diff] [blame] | 68 | std::is_base_of<llvm::Value, T>::value && | 
|  | 69 | !std::is_base_of<llvm::Constant, T>::value && | 
|  | 70 | !std::is_base_of<llvm::BasicBlock, T>::value> | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 71 | struct DominatingPointer; | 
|  | 72 | template <class T> struct DominatingPointer<T,false> : InvariantValue<T*> {}; | 
|  | 73 | // template <class T> struct DominatingPointer<T,true> at end of file | 
|  | 74 |  | 
|  | 75 | template <class T> struct DominatingValue<T*> : DominatingPointer<T> {}; | 
|  | 76 |  | 
| Reid Kleckner | c311aba | 2014-10-31 23:33:56 +0000 | [diff] [blame] | 77 | enum CleanupKind : unsigned { | 
| Nico Weber | ebf9a05 | 2015-02-25 03:58:36 +0000 | [diff] [blame] | 78 | /// Denotes a cleanup that should run when a scope is exited using exceptional | 
|  | 79 | /// control flow (a throw statement leading to stack unwinding, ). | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 80 | EHCleanup = 0x1, | 
| Nico Weber | ebf9a05 | 2015-02-25 03:58:36 +0000 | [diff] [blame] | 81 |  | 
|  | 82 | /// Denotes a cleanup that should run when a scope is exited using normal | 
|  | 83 | /// control flow (falling off the end of the scope, return, goto, ...). | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 84 | NormalCleanup = 0x2, | 
| Nico Weber | ebf9a05 | 2015-02-25 03:58:36 +0000 | [diff] [blame] | 85 |  | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 86 | NormalAndEHCleanup = EHCleanup | NormalCleanup, | 
|  | 87 |  | 
|  | 88 | InactiveCleanup = 0x4, | 
|  | 89 | InactiveEHCleanup = EHCleanup | InactiveCleanup, | 
|  | 90 | InactiveNormalCleanup = NormalCleanup | InactiveCleanup, | 
| Tim Shen | 421119f | 2016-07-01 21:08:47 +0000 | [diff] [blame] | 91 | InactiveNormalAndEHCleanup = NormalAndEHCleanup | InactiveCleanup, | 
|  | 92 |  | 
|  | 93 | LifetimeMarker = 0x8, | 
|  | 94 | NormalEHLifetimeMarker = LifetimeMarker | NormalAndEHCleanup, | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 95 | }; | 
|  | 96 |  | 
|  | 97 | /// A stack of scopes which respond to exceptions, including cleanups | 
|  | 98 | /// and catch blocks. | 
|  | 99 | class EHScopeStack { | 
|  | 100 | public: | 
| James Y Knight | 8041e59 | 2015-07-17 21:58:11 +0000 | [diff] [blame] | 101 | /* Should switch to alignof(uint64_t) instead of 8, when EHCleanupScope can */ | 
|  | 102 | enum { ScopeStackAlignment = 8 }; | 
| James Y Knight | 53c7616 | 2015-07-17 18:21:37 +0000 | [diff] [blame] | 103 |  | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 104 | /// A saved depth on the scope stack.  This is necessary because | 
|  | 105 | /// pushing scopes onto the stack invalidates iterators. | 
|  | 106 | class stable_iterator { | 
|  | 107 | friend class EHScopeStack; | 
|  | 108 |  | 
|  | 109 | /// Offset from StartOfData to EndOfBuffer. | 
|  | 110 | ptrdiff_t Size; | 
|  | 111 |  | 
|  | 112 | stable_iterator(ptrdiff_t Size) : Size(Size) {} | 
|  | 113 |  | 
|  | 114 | public: | 
|  | 115 | static stable_iterator invalid() { return stable_iterator(-1); } | 
|  | 116 | stable_iterator() : Size(-1) {} | 
|  | 117 |  | 
|  | 118 | bool isValid() const { return Size >= 0; } | 
|  | 119 |  | 
|  | 120 | /// Returns true if this scope encloses I. | 
|  | 121 | /// Returns false if I is invalid. | 
|  | 122 | /// This scope must be valid. | 
|  | 123 | bool encloses(stable_iterator I) const { return Size <= I.Size; } | 
|  | 124 |  | 
|  | 125 | /// Returns true if this scope strictly encloses I: that is, | 
|  | 126 | /// if it encloses I and is not I. | 
|  | 127 | /// Returns false is I is invalid. | 
|  | 128 | /// This scope must be valid. | 
|  | 129 | bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; } | 
|  | 130 |  | 
|  | 131 | friend bool operator==(stable_iterator A, stable_iterator B) { | 
|  | 132 | return A.Size == B.Size; | 
|  | 133 | } | 
|  | 134 | friend bool operator!=(stable_iterator A, stable_iterator B) { | 
|  | 135 | return A.Size != B.Size; | 
|  | 136 | } | 
|  | 137 | }; | 
|  | 138 |  | 
|  | 139 | /// Information for lazily generating a cleanup.  Subclasses must be | 
|  | 140 | /// POD-like: cleanups will not be destructed, and they will be | 
|  | 141 | /// allocated on the cleanup stack and freely copied and moved | 
|  | 142 | /// around. | 
|  | 143 | /// | 
|  | 144 | /// Cleanup implementations should generally be declared in an | 
|  | 145 | /// anonymous namespace. | 
|  | 146 | class Cleanup { | 
|  | 147 | // Anchor the construction vtable. | 
|  | 148 | virtual void anchor(); | 
| David Blaikie | fb2b796 | 2015-08-18 22:09:28 +0000 | [diff] [blame] | 149 |  | 
| David Blaikie | 7e70d68 | 2015-08-18 22:40:54 +0000 | [diff] [blame] | 150 | protected: | 
|  | 151 | ~Cleanup() = default; | 
|  | 152 |  | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 153 | public: | 
| David Blaikie | fb2b796 | 2015-08-18 22:09:28 +0000 | [diff] [blame] | 154 | Cleanup(const Cleanup &) = default; | 
| David Blaikie | 50ccaa6 | 2015-08-18 22:10:49 +0000 | [diff] [blame] | 155 | Cleanup(Cleanup &&) {} | 
| David Blaikie | fb2b796 | 2015-08-18 22:09:28 +0000 | [diff] [blame] | 156 | Cleanup() = default; | 
|  | 157 |  | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 158 | /// Generation flags. | 
|  | 159 | class Flags { | 
|  | 160 | enum { | 
|  | 161 | F_IsForEH             = 0x1, | 
|  | 162 | F_IsNormalCleanupKind = 0x2, | 
|  | 163 | F_IsEHCleanupKind     = 0x4 | 
|  | 164 | }; | 
|  | 165 | unsigned flags; | 
|  | 166 |  | 
|  | 167 | public: | 
|  | 168 | Flags() : flags(0) {} | 
|  | 169 |  | 
|  | 170 | /// isForEH - true if the current emission is for an EH cleanup. | 
|  | 171 | bool isForEHCleanup() const { return flags & F_IsForEH; } | 
|  | 172 | bool isForNormalCleanup() const { return !isForEHCleanup(); } | 
|  | 173 | void setIsForEHCleanup() { flags |= F_IsForEH; } | 
|  | 174 |  | 
|  | 175 | bool isNormalCleanupKind() const { return flags & F_IsNormalCleanupKind; } | 
|  | 176 | void setIsNormalCleanupKind() { flags |= F_IsNormalCleanupKind; } | 
|  | 177 |  | 
|  | 178 | /// isEHCleanupKind - true if the cleanup was pushed as an EH | 
|  | 179 | /// cleanup. | 
|  | 180 | bool isEHCleanupKind() const { return flags & F_IsEHCleanupKind; } | 
|  | 181 | void setIsEHCleanupKind() { flags |= F_IsEHCleanupKind; } | 
|  | 182 | }; | 
|  | 183 |  | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 184 |  | 
|  | 185 | /// Emit the cleanup.  For normal cleanups, this is run in the | 
|  | 186 | /// same EH context as when the cleanup was pushed, i.e. the | 
|  | 187 | /// immediately-enclosing context of the cleanup scope.  For | 
|  | 188 | /// EH cleanups, this is run in a terminate context. | 
|  | 189 | /// | 
|  | 190 | // \param flags cleanup kind. | 
|  | 191 | virtual void Emit(CodeGenFunction &CGF, Flags flags) = 0; | 
|  | 192 | }; | 
|  | 193 |  | 
| Benjamin Kramer | 51680bc | 2015-03-12 23:41:40 +0000 | [diff] [blame] | 194 | /// ConditionalCleanup stores the saved form of its parameters, | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 195 | /// then restores them and performs the cleanup. | 
| David Blaikie | 7e70d68 | 2015-08-18 22:40:54 +0000 | [diff] [blame] | 196 | template <class T, class... As> | 
|  | 197 | class ConditionalCleanup final : public Cleanup { | 
| Benjamin Kramer | 51680bc | 2015-03-12 23:41:40 +0000 | [diff] [blame] | 198 | typedef std::tuple<typename DominatingValue<As>::saved_type...> SavedTuple; | 
|  | 199 | SavedTuple Saved; | 
|  | 200 |  | 
|  | 201 | template <std::size_t... Is> | 
| Benjamin Kramer | 37508d3 | 2019-08-15 10:56:05 +0000 | [diff] [blame] | 202 | T restore(CodeGenFunction &CGF, std::index_sequence<Is...>) { | 
| Benjamin Kramer | 51680bc | 2015-03-12 23:41:40 +0000 | [diff] [blame] | 203 | // It's important that the restores are emitted in order. The braced init | 
| Simon Pilgrim | 2c51880 | 2017-03-30 14:13:19 +0000 | [diff] [blame] | 204 | // list guarantees that. | 
| Benjamin Kramer | 51680bc | 2015-03-12 23:41:40 +0000 | [diff] [blame] | 205 | return T{DominatingValue<As>::restore(CGF, std::get<Is>(Saved))...}; | 
|  | 206 | } | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 207 |  | 
| Craig Topper | 4f12f10 | 2014-03-12 06:41:41 +0000 | [diff] [blame] | 208 | void Emit(CodeGenFunction &CGF, Flags flags) override { | 
| Benjamin Kramer | 37508d3 | 2019-08-15 10:56:05 +0000 | [diff] [blame] | 209 | restore(CGF, std::index_sequence_for<As...>()).Emit(CGF, flags); | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 210 | } | 
|  | 211 |  | 
|  | 212 | public: | 
| Benjamin Kramer | 51680bc | 2015-03-12 23:41:40 +0000 | [diff] [blame] | 213 | ConditionalCleanup(typename DominatingValue<As>::saved_type... A) | 
|  | 214 | : Saved(A...) {} | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 215 |  | 
| Benjamin Kramer | 51680bc | 2015-03-12 23:41:40 +0000 | [diff] [blame] | 216 | ConditionalCleanup(SavedTuple Tuple) : Saved(std::move(Tuple)) {} | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 217 | }; | 
|  | 218 |  | 
|  | 219 | private: | 
|  | 220 | // The implementation for this class is in CGException.h and | 
|  | 221 | // CGException.cpp; the definition is here because it's used as a | 
|  | 222 | // member of CodeGenFunction. | 
|  | 223 |  | 
|  | 224 | /// The start of the scope-stack buffer, i.e. the allocated pointer | 
|  | 225 | /// for the buffer.  All of these pointers are either simultaneously | 
|  | 226 | /// null or simultaneously valid. | 
|  | 227 | char *StartOfBuffer; | 
|  | 228 |  | 
|  | 229 | /// The end of the buffer. | 
|  | 230 | char *EndOfBuffer; | 
|  | 231 |  | 
|  | 232 | /// The first valid entry in the buffer. | 
|  | 233 | char *StartOfData; | 
|  | 234 |  | 
|  | 235 | /// The innermost normal cleanup on the stack. | 
|  | 236 | stable_iterator InnermostNormalCleanup; | 
|  | 237 |  | 
|  | 238 | /// The innermost EH scope on the stack. | 
|  | 239 | stable_iterator InnermostEHScope; | 
|  | 240 |  | 
|  | 241 | /// The current set of branch fixups.  A branch fixup is a jump to | 
|  | 242 | /// an as-yet unemitted label, i.e. a label for which we don't yet | 
|  | 243 | /// know the EH stack depth.  Whenever we pop a cleanup, we have | 
|  | 244 | /// to thread all the current branch fixups through it. | 
|  | 245 | /// | 
|  | 246 | /// Fixups are recorded as the Use of the respective branch or | 
|  | 247 | /// switch statement.  The use points to the final destination. | 
|  | 248 | /// When popping out of a cleanup, these uses are threaded through | 
|  | 249 | /// the cleanup and adjusted to point to the new cleanup. | 
|  | 250 | /// | 
|  | 251 | /// Note that branches are allowed to jump into protected scopes | 
|  | 252 | /// in certain situations;  e.g. the following code is legal: | 
|  | 253 | ///     struct A { ~A(); }; // trivial ctor, non-trivial dtor | 
|  | 254 | ///     goto foo; | 
|  | 255 | ///     A a; | 
|  | 256 | ///    foo: | 
|  | 257 | ///     bar(); | 
|  | 258 | SmallVector<BranchFixup, 8> BranchFixups; | 
|  | 259 |  | 
|  | 260 | char *allocate(size_t Size); | 
| James Y Knight | 53c7616 | 2015-07-17 18:21:37 +0000 | [diff] [blame] | 261 | void deallocate(size_t Size); | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 262 |  | 
|  | 263 | void *pushCleanup(CleanupKind K, size_t DataSize); | 
|  | 264 |  | 
|  | 265 | public: | 
| Craig Topper | 8a13c41 | 2014-05-21 05:09:00 +0000 | [diff] [blame] | 266 | EHScopeStack() : StartOfBuffer(nullptr), EndOfBuffer(nullptr), | 
|  | 267 | StartOfData(nullptr), InnermostNormalCleanup(stable_end()), | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 268 | InnermostEHScope(stable_end()) {} | 
|  | 269 | ~EHScopeStack() { delete[] StartOfBuffer; } | 
|  | 270 |  | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 271 | /// Push a lazily-created cleanup on the stack. | 
| Benjamin Kramer | c582c89 | 2015-02-15 20:11:22 +0000 | [diff] [blame] | 272 | template <class T, class... As> void pushCleanup(CleanupKind Kind, As... A) { | 
| Benjamin Kramer | c3f8925 | 2016-10-20 14:27:22 +0000 | [diff] [blame] | 273 | static_assert(alignof(T) <= ScopeStackAlignment, | 
| James Y Knight | 53c7616 | 2015-07-17 18:21:37 +0000 | [diff] [blame] | 274 | "Cleanup's alignment is too large."); | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 275 | void *Buffer = pushCleanup(Kind, sizeof(T)); | 
| Benjamin Kramer | c582c89 | 2015-02-15 20:11:22 +0000 | [diff] [blame] | 276 | Cleanup *Obj = new (Buffer) T(A...); | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 277 | (void) Obj; | 
|  | 278 | } | 
|  | 279 |  | 
| Benjamin Kramer | 51680bc | 2015-03-12 23:41:40 +0000 | [diff] [blame] | 280 | /// Push a lazily-created cleanup on the stack. Tuple version. | 
|  | 281 | template <class T, class... As> | 
| Benjamin Kramer | 7f1f6b5 | 2015-03-12 23:46:55 +0000 | [diff] [blame] | 282 | void pushCleanupTuple(CleanupKind Kind, std::tuple<As...> A) { | 
| Benjamin Kramer | c3f8925 | 2016-10-20 14:27:22 +0000 | [diff] [blame] | 283 | static_assert(alignof(T) <= ScopeStackAlignment, | 
| James Y Knight | 53c7616 | 2015-07-17 18:21:37 +0000 | [diff] [blame] | 284 | "Cleanup's alignment is too large."); | 
| Benjamin Kramer | 51680bc | 2015-03-12 23:41:40 +0000 | [diff] [blame] | 285 | void *Buffer = pushCleanup(Kind, sizeof(T)); | 
|  | 286 | Cleanup *Obj = new (Buffer) T(std::move(A)); | 
|  | 287 | (void) Obj; | 
|  | 288 | } | 
|  | 289 |  | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 290 | // Feel free to add more variants of the following: | 
|  | 291 |  | 
|  | 292 | /// Push a cleanup with non-constant storage requirements on the | 
|  | 293 | /// stack.  The cleanup type must provide an additional static method: | 
|  | 294 | ///   static size_t getExtraSize(size_t); | 
|  | 295 | /// The argument to this method will be the value N, which will also | 
|  | 296 | /// be passed as the first argument to the constructor. | 
|  | 297 | /// | 
|  | 298 | /// The data stored in the extra storage must obey the same | 
|  | 299 | /// restrictions as normal cleanup member data. | 
|  | 300 | /// | 
|  | 301 | /// The pointer returned from this method is valid until the cleanup | 
|  | 302 | /// stack is modified. | 
| Benjamin Kramer | c582c89 | 2015-02-15 20:11:22 +0000 | [diff] [blame] | 303 | template <class T, class... As> | 
| Benjamin Kramer | 583089c | 2015-02-15 20:24:47 +0000 | [diff] [blame] | 304 | T *pushCleanupWithExtra(CleanupKind Kind, size_t N, As... A) { | 
| Benjamin Kramer | c3f8925 | 2016-10-20 14:27:22 +0000 | [diff] [blame] | 305 | static_assert(alignof(T) <= ScopeStackAlignment, | 
| James Y Knight | 53c7616 | 2015-07-17 18:21:37 +0000 | [diff] [blame] | 306 | "Cleanup's alignment is too large."); | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 307 | void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N)); | 
| Benjamin Kramer | c582c89 | 2015-02-15 20:11:22 +0000 | [diff] [blame] | 308 | return new (Buffer) T(N, A...); | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 309 | } | 
|  | 310 |  | 
|  | 311 | void pushCopyOfCleanup(CleanupKind Kind, const void *Cleanup, size_t Size) { | 
|  | 312 | void *Buffer = pushCleanup(Kind, Size); | 
|  | 313 | std::memcpy(Buffer, Cleanup, Size); | 
|  | 314 | } | 
|  | 315 |  | 
|  | 316 | /// Pops a cleanup scope off the stack.  This is private to CGCleanup.cpp. | 
|  | 317 | void popCleanup(); | 
|  | 318 |  | 
|  | 319 | /// Push a set of catch handlers on the stack.  The catch is | 
|  | 320 | /// uninitialized and will need to have the given number of handlers | 
|  | 321 | /// set on it. | 
|  | 322 | class EHCatchScope *pushCatch(unsigned NumHandlers); | 
|  | 323 |  | 
|  | 324 | /// Pops a catch scope off the stack.  This is private to CGException.cpp. | 
|  | 325 | void popCatch(); | 
|  | 326 |  | 
|  | 327 | /// Push an exceptions filter on the stack. | 
|  | 328 | class EHFilterScope *pushFilter(unsigned NumFilters); | 
|  | 329 |  | 
|  | 330 | /// Pops an exceptions filter off the stack. | 
|  | 331 | void popFilter(); | 
|  | 332 |  | 
|  | 333 | /// Push a terminate handler on the stack. | 
|  | 334 | void pushTerminate(); | 
|  | 335 |  | 
|  | 336 | /// Pops a terminate handler off the stack. | 
|  | 337 | void popTerminate(); | 
|  | 338 |  | 
| David Majnemer | dc012fa | 2015-04-22 21:38:15 +0000 | [diff] [blame] | 339 | // Returns true iff the current scope is either empty or contains only | 
|  | 340 | // lifetime markers, i.e. no real cleanup code | 
|  | 341 | bool containsOnlyLifetimeMarkers(stable_iterator Old) const; | 
|  | 342 |  | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 343 | /// Determines whether the exception-scopes stack is empty. | 
|  | 344 | bool empty() const { return StartOfData == EndOfBuffer; } | 
|  | 345 |  | 
| Akira Hatanaka | 8af7bb2 | 2016-04-01 22:58:55 +0000 | [diff] [blame] | 346 | bool requiresLandingPad() const; | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 347 |  | 
|  | 348 | /// Determines whether there are any normal cleanups on the stack. | 
|  | 349 | bool hasNormalCleanups() const { | 
|  | 350 | return InnermostNormalCleanup != stable_end(); | 
|  | 351 | } | 
|  | 352 |  | 
|  | 353 | /// Returns the innermost normal cleanup on the stack, or | 
|  | 354 | /// stable_end() if there are no normal cleanups. | 
|  | 355 | stable_iterator getInnermostNormalCleanup() const { | 
|  | 356 | return InnermostNormalCleanup; | 
|  | 357 | } | 
|  | 358 | stable_iterator getInnermostActiveNormalCleanup() const; | 
|  | 359 |  | 
|  | 360 | stable_iterator getInnermostEHScope() const { | 
|  | 361 | return InnermostEHScope; | 
|  | 362 | } | 
|  | 363 |  | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 364 |  | 
|  | 365 | /// An unstable reference to a scope-stack depth.  Invalidated by | 
|  | 366 | /// pushes but not pops. | 
|  | 367 | class iterator; | 
|  | 368 |  | 
|  | 369 | /// Returns an iterator pointing to the innermost EH scope. | 
|  | 370 | iterator begin() const; | 
|  | 371 |  | 
|  | 372 | /// Returns an iterator pointing to the outermost EH scope. | 
|  | 373 | iterator end() const; | 
|  | 374 |  | 
|  | 375 | /// Create a stable reference to the top of the EH stack.  The | 
|  | 376 | /// returned reference is valid until that scope is popped off the | 
|  | 377 | /// stack. | 
|  | 378 | stable_iterator stable_begin() const { | 
|  | 379 | return stable_iterator(EndOfBuffer - StartOfData); | 
|  | 380 | } | 
|  | 381 |  | 
|  | 382 | /// Create a stable reference to the bottom of the EH stack. | 
|  | 383 | static stable_iterator stable_end() { | 
|  | 384 | return stable_iterator(0); | 
|  | 385 | } | 
|  | 386 |  | 
|  | 387 | /// Translates an iterator into a stable_iterator. | 
|  | 388 | stable_iterator stabilize(iterator it) const; | 
|  | 389 |  | 
|  | 390 | /// Turn a stable reference to a scope depth into a unstable pointer | 
|  | 391 | /// to the EH stack. | 
|  | 392 | iterator find(stable_iterator save) const; | 
|  | 393 |  | 
| Reid Kleckner | d29f134 | 2013-06-19 17:07:50 +0000 | [diff] [blame] | 394 | /// Add a branch fixup to the current cleanup scope. | 
|  | 395 | BranchFixup &addBranchFixup() { | 
|  | 396 | assert(hasNormalCleanups() && "adding fixup in scope without cleanups"); | 
|  | 397 | BranchFixups.push_back(BranchFixup()); | 
|  | 398 | return BranchFixups.back(); | 
|  | 399 | } | 
|  | 400 |  | 
|  | 401 | unsigned getNumBranchFixups() const { return BranchFixups.size(); } | 
|  | 402 | BranchFixup &getBranchFixup(unsigned I) { | 
|  | 403 | assert(I < getNumBranchFixups()); | 
|  | 404 | return BranchFixups[I]; | 
|  | 405 | } | 
|  | 406 |  | 
|  | 407 | /// Pops lazily-removed fixups from the end of the list.  This | 
|  | 408 | /// should only be called by procedures which have just popped a | 
|  | 409 | /// cleanup or resolved one or more fixups. | 
|  | 410 | void popNullFixups(); | 
|  | 411 |  | 
|  | 412 | /// Clears the branch-fixups list.  This should only be called by | 
|  | 413 | /// ResolveAllBranchFixups. | 
|  | 414 | void clearFixups() { BranchFixups.clear(); } | 
|  | 415 | }; | 
|  | 416 |  | 
|  | 417 | } // namespace CodeGen | 
|  | 418 | } // namespace clang | 
|  | 419 |  | 
|  | 420 | #endif |