blob: 5e22a66ef4ff3f6104a263c9598da70f43fca9ff [file] [log] [blame]
//===-- CGCleanup.h - Classes for cleanups IR generation --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// These classes support the generation of LLVM IR for cleanups.
//
//===----------------------------------------------------------------------===//

#ifndef CLANG_CODEGEN_CGCLEANUP_H
#define CLANG_CODEGEN_CGCLEANUP_H

#include "clang/Basic/LLVM.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"

namespace llvm {
  class Value;
  class BasicBlock;
  class BranchInst;
}

namespace clang {
namespace CodeGen {

class CodeGenFunction;
31
32/// A branch fixup. These are required when emitting a goto to a
33/// label which hasn't been emitted yet. The goto is optimistically
34/// emitted as a branch to the basic block for the label, and (if it
35/// occurs in a scope with non-trivial cleanups) a fixup is added to
36/// the innermost cleanup. When a (normal) cleanup is popped, any
37/// unresolved fixups in that scope are threaded through the cleanup.
38struct BranchFixup {
39 /// The block containing the terminator which needs to be modified
40 /// into a switch if this fixup is resolved into the current scope.
41 /// If null, LatestBranch points directly to the destination.
42 llvm::BasicBlock *OptimisticBranchBlock;
43
44 /// The ultimate destination of the branch.
45 ///
46 /// This can be set to null to indicate that this fixup was
47 /// successfully resolved.
48 llvm::BasicBlock *Destination;
49
50 /// The destination index value.
51 unsigned DestinationIndex;
52
53 /// The initial branch of the fixup.
54 llvm::BranchInst *InitialBranch;
55};
56
57template <class T> struct InvariantValue {
58 typedef T type;
59 typedef T saved_type;
60 static bool needsSaving(type value) { return false; }
61 static saved_type save(CodeGenFunction &CGF, type value) { return value; }
62 static type restore(CodeGenFunction &CGF, saved_type value) { return value; }
63};
64
65/// A metaprogramming class for ensuring that a value will dominate an
66/// arbitrary position in a function.
67template <class T> struct DominatingValue : InvariantValue<T> {};
68
69template <class T, bool mightBeInstruction =
70 llvm::is_base_of<llvm::Value, T>::value &&
71 !llvm::is_base_of<llvm::Constant, T>::value &&
72 !llvm::is_base_of<llvm::BasicBlock, T>::value>
73struct DominatingPointer;
74template <class T> struct DominatingPointer<T,false> : InvariantValue<T*> {};
75// template <class T> struct DominatingPointer<T,true> at end of file
76
77template <class T> struct DominatingValue<T*> : DominatingPointer<T> {};
78
/// Kinds of cleanup that can be pushed on an EHScopeStack.  The low
/// bits select which edges (EH, normal, or both) the cleanup runs on;
/// the InactiveCleanup bit marks a cleanup that starts deactivated.
enum CleanupKind {
  EHCleanup = 0x1,              // run only on exception edges
  NormalCleanup = 0x2,          // run only on normal control-flow edges
  NormalAndEHCleanup = EHCleanup | NormalCleanup,

  InactiveCleanup = 0x4,        // pushed in the deactivated state
  InactiveEHCleanup = EHCleanup | InactiveCleanup,
  InactiveNormalCleanup = NormalCleanup | InactiveCleanup,
  InactiveNormalAndEHCleanup = NormalAndEHCleanup | InactiveCleanup
};
89
90/// A stack of scopes which respond to exceptions, including cleanups
91/// and catch blocks.
92class EHScopeStack {
93public:
94 /// A saved depth on the scope stack. This is necessary because
95 /// pushing scopes onto the stack invalidates iterators.
96 class stable_iterator {
97 friend class EHScopeStack;
98
99 /// Offset from StartOfData to EndOfBuffer.
100 ptrdiff_t Size;
101
102 stable_iterator(ptrdiff_t Size) : Size(Size) {}
103
104 public:
105 static stable_iterator invalid() { return stable_iterator(-1); }
106 stable_iterator() : Size(-1) {}
107
108 bool isValid() const { return Size >= 0; }
109
110 /// Returns true if this scope encloses I.
111 /// Returns false if I is invalid.
112 /// This scope must be valid.
113 bool encloses(stable_iterator I) const { return Size <= I.Size; }
114
115 /// Returns true if this scope strictly encloses I: that is,
116 /// if it encloses I and is not I.
117 /// Returns false is I is invalid.
118 /// This scope must be valid.
119 bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }
120
121 friend bool operator==(stable_iterator A, stable_iterator B) {
122 return A.Size == B.Size;
123 }
124 friend bool operator!=(stable_iterator A, stable_iterator B) {
125 return A.Size != B.Size;
126 }
127 };
128
129 /// Information for lazily generating a cleanup. Subclasses must be
130 /// POD-like: cleanups will not be destructed, and they will be
131 /// allocated on the cleanup stack and freely copied and moved
132 /// around.
133 ///
134 /// Cleanup implementations should generally be declared in an
135 /// anonymous namespace.
136 class Cleanup {
137 // Anchor the construction vtable.
138 virtual void anchor();
139 public:
140 /// Generation flags.
141 class Flags {
142 enum {
143 F_IsForEH = 0x1,
144 F_IsNormalCleanupKind = 0x2,
145 F_IsEHCleanupKind = 0x4
146 };
147 unsigned flags;
148
149 public:
150 Flags() : flags(0) {}
151
152 /// isForEH - true if the current emission is for an EH cleanup.
153 bool isForEHCleanup() const { return flags & F_IsForEH; }
154 bool isForNormalCleanup() const { return !isForEHCleanup(); }
155 void setIsForEHCleanup() { flags |= F_IsForEH; }
156
157 bool isNormalCleanupKind() const { return flags & F_IsNormalCleanupKind; }
158 void setIsNormalCleanupKind() { flags |= F_IsNormalCleanupKind; }
159
160 /// isEHCleanupKind - true if the cleanup was pushed as an EH
161 /// cleanup.
162 bool isEHCleanupKind() const { return flags & F_IsEHCleanupKind; }
163 void setIsEHCleanupKind() { flags |= F_IsEHCleanupKind; }
164 };
165
166 // Provide a virtual destructor to suppress a very common warning
167 // that unfortunately cannot be suppressed without this. Cleanups
168 // should not rely on this destructor ever being called.
169 virtual ~Cleanup() {}
170
171 /// Emit the cleanup. For normal cleanups, this is run in the
172 /// same EH context as when the cleanup was pushed, i.e. the
173 /// immediately-enclosing context of the cleanup scope. For
174 /// EH cleanups, this is run in a terminate context.
175 ///
176 // \param flags cleanup kind.
177 virtual void Emit(CodeGenFunction &CGF, Flags flags) = 0;
178 };
179
180 /// ConditionalCleanupN stores the saved form of its N parameters,
181 /// then restores them and performs the cleanup.
182 template <class T, class A0>
183 class ConditionalCleanup1 : public Cleanup {
184 typedef typename DominatingValue<A0>::saved_type A0_saved;
185 A0_saved a0_saved;
186
187 void Emit(CodeGenFunction &CGF, Flags flags) {
188 A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
189 T(a0).Emit(CGF, flags);
190 }
191
192 public:
193 ConditionalCleanup1(A0_saved a0)
194 : a0_saved(a0) {}
195 };
196
197 template <class T, class A0, class A1>
198 class ConditionalCleanup2 : public Cleanup {
199 typedef typename DominatingValue<A0>::saved_type A0_saved;
200 typedef typename DominatingValue<A1>::saved_type A1_saved;
201 A0_saved a0_saved;
202 A1_saved a1_saved;
203
204 void Emit(CodeGenFunction &CGF, Flags flags) {
205 A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
206 A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
207 T(a0, a1).Emit(CGF, flags);
208 }
209
210 public:
211 ConditionalCleanup2(A0_saved a0, A1_saved a1)
212 : a0_saved(a0), a1_saved(a1) {}
213 };
214
215 template <class T, class A0, class A1, class A2>
216 class ConditionalCleanup3 : public Cleanup {
217 typedef typename DominatingValue<A0>::saved_type A0_saved;
218 typedef typename DominatingValue<A1>::saved_type A1_saved;
219 typedef typename DominatingValue<A2>::saved_type A2_saved;
220 A0_saved a0_saved;
221 A1_saved a1_saved;
222 A2_saved a2_saved;
223
224 void Emit(CodeGenFunction &CGF, Flags flags) {
225 A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
226 A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
227 A2 a2 = DominatingValue<A2>::restore(CGF, a2_saved);
228 T(a0, a1, a2).Emit(CGF, flags);
229 }
230
231 public:
232 ConditionalCleanup3(A0_saved a0, A1_saved a1, A2_saved a2)
233 : a0_saved(a0), a1_saved(a1), a2_saved(a2) {}
234 };
235
236 template <class T, class A0, class A1, class A2, class A3>
237 class ConditionalCleanup4 : public Cleanup {
238 typedef typename DominatingValue<A0>::saved_type A0_saved;
239 typedef typename DominatingValue<A1>::saved_type A1_saved;
240 typedef typename DominatingValue<A2>::saved_type A2_saved;
241 typedef typename DominatingValue<A3>::saved_type A3_saved;
242 A0_saved a0_saved;
243 A1_saved a1_saved;
244 A2_saved a2_saved;
245 A3_saved a3_saved;
246
247 void Emit(CodeGenFunction &CGF, Flags flags) {
248 A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
249 A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
250 A2 a2 = DominatingValue<A2>::restore(CGF, a2_saved);
251 A3 a3 = DominatingValue<A3>::restore(CGF, a3_saved);
252 T(a0, a1, a2, a3).Emit(CGF, flags);
253 }
254
255 public:
256 ConditionalCleanup4(A0_saved a0, A1_saved a1, A2_saved a2, A3_saved a3)
257 : a0_saved(a0), a1_saved(a1), a2_saved(a2), a3_saved(a3) {}
258 };
259
260private:
261 // The implementation for this class is in CGException.h and
262 // CGException.cpp; the definition is here because it's used as a
263 // member of CodeGenFunction.
264
265 /// The start of the scope-stack buffer, i.e. the allocated pointer
266 /// for the buffer. All of these pointers are either simultaneously
267 /// null or simultaneously valid.
268 char *StartOfBuffer;
269
270 /// The end of the buffer.
271 char *EndOfBuffer;
272
273 /// The first valid entry in the buffer.
274 char *StartOfData;
275
276 /// The innermost normal cleanup on the stack.
277 stable_iterator InnermostNormalCleanup;
278
279 /// The innermost EH scope on the stack.
280 stable_iterator InnermostEHScope;
281
282 /// The current set of branch fixups. A branch fixup is a jump to
283 /// an as-yet unemitted label, i.e. a label for which we don't yet
284 /// know the EH stack depth. Whenever we pop a cleanup, we have
285 /// to thread all the current branch fixups through it.
286 ///
287 /// Fixups are recorded as the Use of the respective branch or
288 /// switch statement. The use points to the final destination.
289 /// When popping out of a cleanup, these uses are threaded through
290 /// the cleanup and adjusted to point to the new cleanup.
291 ///
292 /// Note that branches are allowed to jump into protected scopes
293 /// in certain situations; e.g. the following code is legal:
294 /// struct A { ~A(); }; // trivial ctor, non-trivial dtor
295 /// goto foo;
296 /// A a;
297 /// foo:
298 /// bar();
299 SmallVector<BranchFixup, 8> BranchFixups;
300
301 char *allocate(size_t Size);
302
303 void *pushCleanup(CleanupKind K, size_t DataSize);
304
305public:
306 EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
307 InnermostNormalCleanup(stable_end()),
308 InnermostEHScope(stable_end()) {}
309 ~EHScopeStack() { delete[] StartOfBuffer; }
310
311 // Variadic templates would make this not terrible.
312
313 /// Push a lazily-created cleanup on the stack.
314 template <class T>
315 void pushCleanup(CleanupKind Kind) {
316 void *Buffer = pushCleanup(Kind, sizeof(T));
317 Cleanup *Obj = new(Buffer) T();
318 (void) Obj;
319 }
320
321 /// Push a lazily-created cleanup on the stack.
322 template <class T, class A0>
323 void pushCleanup(CleanupKind Kind, A0 a0) {
324 void *Buffer = pushCleanup(Kind, sizeof(T));
325 Cleanup *Obj = new(Buffer) T(a0);
326 (void) Obj;
327 }
328
329 /// Push a lazily-created cleanup on the stack.
330 template <class T, class A0, class A1>
331 void pushCleanup(CleanupKind Kind, A0 a0, A1 a1) {
332 void *Buffer = pushCleanup(Kind, sizeof(T));
333 Cleanup *Obj = new(Buffer) T(a0, a1);
334 (void) Obj;
335 }
336
337 /// Push a lazily-created cleanup on the stack.
338 template <class T, class A0, class A1, class A2>
339 void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
340 void *Buffer = pushCleanup(Kind, sizeof(T));
341 Cleanup *Obj = new(Buffer) T(a0, a1, a2);
342 (void) Obj;
343 }
344
345 /// Push a lazily-created cleanup on the stack.
346 template <class T, class A0, class A1, class A2, class A3>
347 void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
348 void *Buffer = pushCleanup(Kind, sizeof(T));
349 Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
350 (void) Obj;
351 }
352
353 /// Push a lazily-created cleanup on the stack.
354 template <class T, class A0, class A1, class A2, class A3, class A4>
355 void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
356 void *Buffer = pushCleanup(Kind, sizeof(T));
357 Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4);
358 (void) Obj;
359 }
360
361 // Feel free to add more variants of the following:
362
363 /// Push a cleanup with non-constant storage requirements on the
364 /// stack. The cleanup type must provide an additional static method:
365 /// static size_t getExtraSize(size_t);
366 /// The argument to this method will be the value N, which will also
367 /// be passed as the first argument to the constructor.
368 ///
369 /// The data stored in the extra storage must obey the same
370 /// restrictions as normal cleanup member data.
371 ///
372 /// The pointer returned from this method is valid until the cleanup
373 /// stack is modified.
374 template <class T, class A0, class A1, class A2>
375 T *pushCleanupWithExtra(CleanupKind Kind, size_t N, A0 a0, A1 a1, A2 a2) {
376 void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
377 return new (Buffer) T(N, a0, a1, a2);
378 }
379
380 /// Pops a cleanup scope off the stack. This is private to CGCleanup.cpp.
381 void popCleanup();
382
383 /// Push a set of catch handlers on the stack. The catch is
384 /// uninitialized and will need to have the given number of handlers
385 /// set on it.
386 class EHCatchScope *pushCatch(unsigned NumHandlers);
387
388 /// Pops a catch scope off the stack. This is private to CGException.cpp.
389 void popCatch();
390
391 /// Push an exceptions filter on the stack.
392 class EHFilterScope *pushFilter(unsigned NumFilters);
393
394 /// Pops an exceptions filter off the stack.
395 void popFilter();
396
397 /// Push a terminate handler on the stack.
398 void pushTerminate();
399
400 /// Pops a terminate handler off the stack.
401 void popTerminate();
402
403 /// Determines whether the exception-scopes stack is empty.
404 bool empty() const { return StartOfData == EndOfBuffer; }
405
406 bool requiresLandingPad() const {
407 return InnermostEHScope != stable_end();
408 }
409
410 /// Determines whether there are any normal cleanups on the stack.
411 bool hasNormalCleanups() const {
412 return InnermostNormalCleanup != stable_end();
413 }
414
415 /// Returns the innermost normal cleanup on the stack, or
416 /// stable_end() if there are no normal cleanups.
417 stable_iterator getInnermostNormalCleanup() const {
418 return InnermostNormalCleanup;
419 }
420 stable_iterator getInnermostActiveNormalCleanup() const;
421
422 stable_iterator getInnermostEHScope() const {
423 return InnermostEHScope;
424 }
425
426 stable_iterator getInnermostActiveEHScope() const;
427
428 /// An unstable reference to a scope-stack depth. Invalidated by
429 /// pushes but not pops.
430 class iterator;
431
432 /// Returns an iterator pointing to the innermost EH scope.
433 iterator begin() const;
434
435 /// Returns an iterator pointing to the outermost EH scope.
436 iterator end() const;
437
438 /// Create a stable reference to the top of the EH stack. The
439 /// returned reference is valid until that scope is popped off the
440 /// stack.
441 stable_iterator stable_begin() const {
442 return stable_iterator(EndOfBuffer - StartOfData);
443 }
444
445 /// Create a stable reference to the bottom of the EH stack.
446 static stable_iterator stable_end() {
447 return stable_iterator(0);
448 }
449
450 /// Translates an iterator into a stable_iterator.
451 stable_iterator stabilize(iterator it) const;
452
453 /// Turn a stable reference to a scope depth into a unstable pointer
454 /// to the EH stack.
455 iterator find(stable_iterator save) const;
456
457 /// Removes the cleanup pointed to by the given stable_iterator.
458 void removeCleanup(stable_iterator save);
459
460 /// Add a branch fixup to the current cleanup scope.
461 BranchFixup &addBranchFixup() {
462 assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
463 BranchFixups.push_back(BranchFixup());
464 return BranchFixups.back();
465 }
466
467 unsigned getNumBranchFixups() const { return BranchFixups.size(); }
468 BranchFixup &getBranchFixup(unsigned I) {
469 assert(I < getNumBranchFixups());
470 return BranchFixups[I];
471 }
472
473 /// Pops lazily-removed fixups from the end of the list. This
474 /// should only be called by procedures which have just popped a
475 /// cleanup or resolved one or more fixups.
476 void popNullFixups();
477
478 /// Clears the branch-fixups list. This should only be called by
479 /// ResolveAllBranchFixups.
480 void clearFixups() { BranchFixups.clear(); }
481};
482
John McCall36f893c2011-01-28 11:13:47 +0000483/// A protected scope for zero-cost EH handling.
484class EHScope {
485 llvm::BasicBlock *CachedLandingPad;
John McCall777d6e52011-08-11 02:22:43 +0000486 llvm::BasicBlock *CachedEHDispatchBlock;
John McCall36f893c2011-01-28 11:13:47 +0000487
John McCall777d6e52011-08-11 02:22:43 +0000488 EHScopeStack::stable_iterator EnclosingEHScope;
489
490 class CommonBitFields {
491 friend class EHScope;
492 unsigned Kind : 2;
493 };
494 enum { NumCommonBits = 2 };
John McCall36f893c2011-01-28 11:13:47 +0000495
496protected:
John McCall777d6e52011-08-11 02:22:43 +0000497 class CatchBitFields {
498 friend class EHCatchScope;
499 unsigned : NumCommonBits;
500
501 unsigned NumHandlers : 32 - NumCommonBits;
502 };
503
504 class CleanupBitFields {
505 friend class EHCleanupScope;
506 unsigned : NumCommonBits;
507
508 /// Whether this cleanup needs to be run along normal edges.
509 unsigned IsNormalCleanup : 1;
510
511 /// Whether this cleanup needs to be run along exception edges.
512 unsigned IsEHCleanup : 1;
513
514 /// Whether this cleanup is currently active.
515 unsigned IsActive : 1;
516
517 /// Whether the normal cleanup should test the activation flag.
518 unsigned TestFlagInNormalCleanup : 1;
519
520 /// Whether the EH cleanup should test the activation flag.
521 unsigned TestFlagInEHCleanup : 1;
522
523 /// The amount of extra storage needed by the Cleanup.
524 /// Always a multiple of the scope-stack alignment.
525 unsigned CleanupSize : 12;
526
527 /// The number of fixups required by enclosing scopes (not including
528 /// this one). If this is the top cleanup scope, all the fixups
529 /// from this index onwards belong to this scope.
530 unsigned FixupDepth : 32 - 17 - NumCommonBits; // currently 13
531 };
532
533 class FilterBitFields {
534 friend class EHFilterScope;
535 unsigned : NumCommonBits;
536
537 unsigned NumFilters : 32 - NumCommonBits;
538 };
539
540 union {
541 CommonBitFields CommonBits;
542 CatchBitFields CatchBits;
543 CleanupBitFields CleanupBits;
544 FilterBitFields FilterBits;
545 };
John McCall36f893c2011-01-28 11:13:47 +0000546
547public:
548 enum Kind { Cleanup, Catch, Terminate, Filter };
549
John McCall777d6e52011-08-11 02:22:43 +0000550 EHScope(Kind kind, EHScopeStack::stable_iterator enclosingEHScope)
551 : CachedLandingPad(0), CachedEHDispatchBlock(0),
552 EnclosingEHScope(enclosingEHScope) {
553 CommonBits.Kind = kind;
554 }
John McCall36f893c2011-01-28 11:13:47 +0000555
John McCall777d6e52011-08-11 02:22:43 +0000556 Kind getKind() const { return static_cast<Kind>(CommonBits.Kind); }
John McCall36f893c2011-01-28 11:13:47 +0000557
558 llvm::BasicBlock *getCachedLandingPad() const {
559 return CachedLandingPad;
560 }
561
John McCall777d6e52011-08-11 02:22:43 +0000562 void setCachedLandingPad(llvm::BasicBlock *block) {
563 CachedLandingPad = block;
564 }
565
566 llvm::BasicBlock *getCachedEHDispatchBlock() const {
567 return CachedEHDispatchBlock;
568 }
569
570 void setCachedEHDispatchBlock(llvm::BasicBlock *block) {
571 CachedEHDispatchBlock = block;
572 }
573
574 bool hasEHBranches() const {
575 if (llvm::BasicBlock *block = getCachedEHDispatchBlock())
576 return !block->use_empty();
577 return false;
578 }
579
580 EHScopeStack::stable_iterator getEnclosingEHScope() const {
581 return EnclosingEHScope;
John McCall36f893c2011-01-28 11:13:47 +0000582 }
583};
584
585/// A scope which attempts to handle some, possibly all, types of
586/// exceptions.
587///
James Dennett2ee5ba32012-06-15 22:10:14 +0000588/// Objective C \@finally blocks are represented using a cleanup scope
John McCall36f893c2011-01-28 11:13:47 +0000589/// after the catch scope.
590class EHCatchScope : public EHScope {
John McCall36f893c2011-01-28 11:13:47 +0000591 // In effect, we have a flexible array member
592 // Handler Handlers[0];
593 // But that's only standard in C99, not C++, so we have to do
594 // annoying pointer arithmetic instead.
595
596public:
597 struct Handler {
598 /// A type info value, or null (C++ null, not an LLVM null pointer)
599 /// for a catch-all.
600 llvm::Value *Type;
601
602 /// The catch handler for this type.
603 llvm::BasicBlock *Block;
604
John McCall777d6e52011-08-11 02:22:43 +0000605 bool isCatchAll() const { return Type == 0; }
John McCall36f893c2011-01-28 11:13:47 +0000606 };
607
608private:
609 friend class EHScopeStack;
610
611 Handler *getHandlers() {
612 return reinterpret_cast<Handler*>(this+1);
613 }
614
615 const Handler *getHandlers() const {
616 return reinterpret_cast<const Handler*>(this+1);
617 }
618
619public:
620 static size_t getSizeForNumHandlers(unsigned N) {
621 return sizeof(EHCatchScope) + N * sizeof(Handler);
622 }
623
John McCall777d6e52011-08-11 02:22:43 +0000624 EHCatchScope(unsigned numHandlers,
625 EHScopeStack::stable_iterator enclosingEHScope)
626 : EHScope(Catch, enclosingEHScope) {
627 CatchBits.NumHandlers = numHandlers;
John McCall36f893c2011-01-28 11:13:47 +0000628 }
629
630 unsigned getNumHandlers() const {
John McCall777d6e52011-08-11 02:22:43 +0000631 return CatchBits.NumHandlers;
John McCall36f893c2011-01-28 11:13:47 +0000632 }
633
634 void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
635 setHandler(I, /*catchall*/ 0, Block);
636 }
637
638 void setHandler(unsigned I, llvm::Value *Type, llvm::BasicBlock *Block) {
639 assert(I < getNumHandlers());
640 getHandlers()[I].Type = Type;
641 getHandlers()[I].Block = Block;
642 }
643
644 const Handler &getHandler(unsigned I) const {
645 assert(I < getNumHandlers());
646 return getHandlers()[I];
647 }
648
649 typedef const Handler *iterator;
650 iterator begin() const { return getHandlers(); }
651 iterator end() const { return getHandlers() + getNumHandlers(); }
652
653 static bool classof(const EHScope *Scope) {
654 return Scope->getKind() == Catch;
655 }
656};
657
658/// A cleanup scope which generates the cleanup blocks lazily.
659class EHCleanupScope : public EHScope {
John McCall36f893c2011-01-28 11:13:47 +0000660 /// The nearest normal cleanup scope enclosing this one.
661 EHScopeStack::stable_iterator EnclosingNormal;
662
John McCall777d6e52011-08-11 02:22:43 +0000663 /// The nearest EH scope enclosing this one.
John McCall36f893c2011-01-28 11:13:47 +0000664 EHScopeStack::stable_iterator EnclosingEH;
665
666 /// The dual entry/exit block along the normal edge. This is lazily
667 /// created if needed before the cleanup is popped.
668 llvm::BasicBlock *NormalBlock;
669
John McCall36f893c2011-01-28 11:13:47 +0000670 /// An optional i1 variable indicating whether this cleanup has been
671 /// activated yet.
672 llvm::AllocaInst *ActiveFlag;
673
674 /// Extra information required for cleanups that have resolved
675 /// branches through them. This has to be allocated on the side
676 /// because everything on the cleanup stack has be trivially
677 /// movable.
678 struct ExtInfo {
679 /// The destinations of normal branch-afters and branch-throughs.
680 llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;
681
682 /// Normal branch-afters.
Chris Lattner686775d2011-07-20 06:58:45 +0000683 SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
John McCall36f893c2011-01-28 11:13:47 +0000684 BranchAfters;
John McCall36f893c2011-01-28 11:13:47 +0000685 };
686 mutable struct ExtInfo *ExtInfo;
687
688 struct ExtInfo &getExtInfo() {
689 if (!ExtInfo) ExtInfo = new struct ExtInfo();
690 return *ExtInfo;
691 }
692
693 const struct ExtInfo &getExtInfo() const {
694 if (!ExtInfo) ExtInfo = new struct ExtInfo();
695 return *ExtInfo;
696 }
697
698public:
699 /// Gets the size required for a lazy cleanup scope with the given
700 /// cleanup-data requirements.
701 static size_t getSizeForCleanupSize(size_t Size) {
702 return sizeof(EHCleanupScope) + Size;
703 }
704
705 size_t getAllocatedSize() const {
John McCall777d6e52011-08-11 02:22:43 +0000706 return sizeof(EHCleanupScope) + CleanupBits.CleanupSize;
John McCall36f893c2011-01-28 11:13:47 +0000707 }
708
John McCall777d6e52011-08-11 02:22:43 +0000709 EHCleanupScope(bool isNormal, bool isEH, bool isActive,
710 unsigned cleanupSize, unsigned fixupDepth,
711 EHScopeStack::stable_iterator enclosingNormal,
712 EHScopeStack::stable_iterator enclosingEH)
713 : EHScope(EHScope::Cleanup, enclosingEH), EnclosingNormal(enclosingNormal),
714 NormalBlock(0), ActiveFlag(0), ExtInfo(0) {
715 CleanupBits.IsNormalCleanup = isNormal;
716 CleanupBits.IsEHCleanup = isEH;
717 CleanupBits.IsActive = isActive;
718 CleanupBits.TestFlagInNormalCleanup = false;
719 CleanupBits.TestFlagInEHCleanup = false;
720 CleanupBits.CleanupSize = cleanupSize;
721 CleanupBits.FixupDepth = fixupDepth;
722
723 assert(CleanupBits.CleanupSize == cleanupSize && "cleanup size overflow");
John McCall36f893c2011-01-28 11:13:47 +0000724 }
725
726 ~EHCleanupScope() {
727 delete ExtInfo;
728 }
729
John McCall777d6e52011-08-11 02:22:43 +0000730 bool isNormalCleanup() const { return CleanupBits.IsNormalCleanup; }
John McCall36f893c2011-01-28 11:13:47 +0000731 llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
732 void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }
733
John McCall777d6e52011-08-11 02:22:43 +0000734 bool isEHCleanup() const { return CleanupBits.IsEHCleanup; }
735 llvm::BasicBlock *getEHBlock() const { return getCachedEHDispatchBlock(); }
736 void setEHBlock(llvm::BasicBlock *BB) { setCachedEHDispatchBlock(BB); }
John McCall36f893c2011-01-28 11:13:47 +0000737
John McCall777d6e52011-08-11 02:22:43 +0000738 bool isActive() const { return CleanupBits.IsActive; }
739 void setActive(bool A) { CleanupBits.IsActive = A; }
John McCall36f893c2011-01-28 11:13:47 +0000740
741 llvm::AllocaInst *getActiveFlag() const { return ActiveFlag; }
742 void setActiveFlag(llvm::AllocaInst *Var) { ActiveFlag = Var; }
743
John McCall777d6e52011-08-11 02:22:43 +0000744 void setTestFlagInNormalCleanup() {
745 CleanupBits.TestFlagInNormalCleanup = true;
746 }
747 bool shouldTestFlagInNormalCleanup() const {
748 return CleanupBits.TestFlagInNormalCleanup;
749 }
John McCall36f893c2011-01-28 11:13:47 +0000750
John McCall777d6e52011-08-11 02:22:43 +0000751 void setTestFlagInEHCleanup() {
752 CleanupBits.TestFlagInEHCleanup = true;
753 }
754 bool shouldTestFlagInEHCleanup() const {
755 return CleanupBits.TestFlagInEHCleanup;
756 }
John McCall36f893c2011-01-28 11:13:47 +0000757
John McCall777d6e52011-08-11 02:22:43 +0000758 unsigned getFixupDepth() const { return CleanupBits.FixupDepth; }
John McCall36f893c2011-01-28 11:13:47 +0000759 EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
760 return EnclosingNormal;
761 }
John McCall36f893c2011-01-28 11:13:47 +0000762
John McCall777d6e52011-08-11 02:22:43 +0000763 size_t getCleanupSize() const { return CleanupBits.CleanupSize; }
John McCall36f893c2011-01-28 11:13:47 +0000764 void *getCleanupBuffer() { return this + 1; }
765
766 EHScopeStack::Cleanup *getCleanup() {
767 return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
768 }
769
770 /// True if this cleanup scope has any branch-afters or branch-throughs.
771 bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); }
772
773 /// Add a branch-after to this cleanup scope. A branch-after is a
774 /// branch from a point protected by this (normal) cleanup to a
775 /// point in the normal cleanup scope immediately containing it.
776 /// For example,
777 /// for (;;) { A a; break; }
778 /// contains a branch-after.
779 ///
780 /// Branch-afters each have their own destination out of the
781 /// cleanup, guaranteed distinct from anything else threaded through
782 /// it. Therefore branch-afters usually force a switch after the
783 /// cleanup.
784 void addBranchAfter(llvm::ConstantInt *Index,
785 llvm::BasicBlock *Block) {
786 struct ExtInfo &ExtInfo = getExtInfo();
787 if (ExtInfo.Branches.insert(Block))
788 ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index));
789 }
790
791 /// Return the number of unique branch-afters on this scope.
792 unsigned getNumBranchAfters() const {
793 return ExtInfo ? ExtInfo->BranchAfters.size() : 0;
794 }
795
796 llvm::BasicBlock *getBranchAfterBlock(unsigned I) const {
797 assert(I < getNumBranchAfters());
798 return ExtInfo->BranchAfters[I].first;
799 }
800
801 llvm::ConstantInt *getBranchAfterIndex(unsigned I) const {
802 assert(I < getNumBranchAfters());
803 return ExtInfo->BranchAfters[I].second;
804 }
805
806 /// Add a branch-through to this cleanup scope. A branch-through is
807 /// a branch from a scope protected by this (normal) cleanup to an
808 /// enclosing scope other than the immediately-enclosing normal
809 /// cleanup scope.
810 ///
811 /// In the following example, the branch through B's scope is a
812 /// branch-through, while the branch through A's scope is a
813 /// branch-after:
814 /// for (;;) { A a; B b; break; }
815 ///
816 /// All branch-throughs have a common destination out of the
817 /// cleanup, one possibly shared with the fall-through. Therefore
818 /// branch-throughs usually don't force a switch after the cleanup.
819 ///
820 /// \return true if the branch-through was new to this scope
821 bool addBranchThrough(llvm::BasicBlock *Block) {
822 return getExtInfo().Branches.insert(Block);
823 }
824
825 /// Determines if this cleanup scope has any branch throughs.
826 bool hasBranchThroughs() const {
827 if (!ExtInfo) return false;
828 return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size());
829 }
830
John McCall36f893c2011-01-28 11:13:47 +0000831 static bool classof(const EHScope *Scope) {
832 return (Scope->getKind() == Cleanup);
833 }
834};
835
836/// An exceptions scope which filters exceptions thrown through it.
837/// Only exceptions matching the filter types will be permitted to be
838/// thrown.
839///
840/// This is used to implement C++ exception specifications.
841class EHFilterScope : public EHScope {
John McCall36f893c2011-01-28 11:13:47 +0000842 // Essentially ends in a flexible array member:
843 // llvm::Value *FilterTypes[0];
844
845 llvm::Value **getFilters() {
846 return reinterpret_cast<llvm::Value**>(this+1);
847 }
848
849 llvm::Value * const *getFilters() const {
850 return reinterpret_cast<llvm::Value* const *>(this+1);
851 }
852
853public:
John McCall777d6e52011-08-11 02:22:43 +0000854 EHFilterScope(unsigned numFilters)
855 : EHScope(Filter, EHScopeStack::stable_end()) {
856 FilterBits.NumFilters = numFilters;
John McCall36f893c2011-01-28 11:13:47 +0000857 }
858
John McCall777d6e52011-08-11 02:22:43 +0000859 static size_t getSizeForNumFilters(unsigned numFilters) {
860 return sizeof(EHFilterScope) + numFilters * sizeof(llvm::Value*);
John McCall36f893c2011-01-28 11:13:47 +0000861 }
862
John McCall777d6e52011-08-11 02:22:43 +0000863 unsigned getNumFilters() const { return FilterBits.NumFilters; }
864
865 void setFilter(unsigned i, llvm::Value *filterValue) {
866 assert(i < getNumFilters());
867 getFilters()[i] = filterValue;
John McCall36f893c2011-01-28 11:13:47 +0000868 }
869
John McCall777d6e52011-08-11 02:22:43 +0000870 llvm::Value *getFilter(unsigned i) const {
871 assert(i < getNumFilters());
872 return getFilters()[i];
873 }
874
875 static bool classof(const EHScope *scope) {
876 return scope->getKind() == Filter;
John McCall36f893c2011-01-28 11:13:47 +0000877 }
878};
879
/// An exceptions scope which calls std::terminate if any exception
/// reaches it.
class EHTerminateScope : public EHScope {
public:
  EHTerminateScope(EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Terminate, enclosingEHScope) {}
  // Unlike catch/filter scopes, a terminate scope carries no trailing
  // variable-length payload, so its allocation size is just sizeof.
  static size_t getSize() { return sizeof(EHTerminateScope); }

  /// LLVM-style RTTI hook for isa<>/cast<>/dyn_cast<>.
  static bool classof(const EHScope *scope) {
    return scope->getKind() == Terminate;
  }
};
892
893/// A non-stable pointer into the scope stack.
894class EHScopeStack::iterator {
895 char *Ptr;
896
897 friend class EHScopeStack;
898 explicit iterator(char *Ptr) : Ptr(Ptr) {}
899
900public:
901 iterator() : Ptr(0) {}
902
903 EHScope *get() const {
904 return reinterpret_cast<EHScope*>(Ptr);
905 }
906
907 EHScope *operator->() const { return get(); }
908 EHScope &operator*() const { return *get(); }
909
910 iterator &operator++() {
911 switch (get()->getKind()) {
912 case EHScope::Catch:
913 Ptr += EHCatchScope::getSizeForNumHandlers(
914 static_cast<const EHCatchScope*>(get())->getNumHandlers());
915 break;
916
917 case EHScope::Filter:
918 Ptr += EHFilterScope::getSizeForNumFilters(
919 static_cast<const EHFilterScope*>(get())->getNumFilters());
920 break;
921
922 case EHScope::Cleanup:
923 Ptr += static_cast<const EHCleanupScope*>(get())
924 ->getAllocatedSize();
925 break;
926
927 case EHScope::Terminate:
928 Ptr += EHTerminateScope::getSize();
929 break;
930 }
931
932 return *this;
933 }
934
935 iterator next() {
936 iterator copy = *this;
937 ++copy;
938 return copy;
939 }
940
941 iterator operator++(int) {
942 iterator copy = *this;
943 operator++();
944 return copy;
945 }
946
947 bool encloses(iterator other) const { return Ptr >= other.Ptr; }
948 bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }
949
950 bool operator==(iterator other) const { return Ptr == other.Ptr; }
951 bool operator!=(iterator other) const { return Ptr != other.Ptr; }
952};
953
/// Returns an iterator to the innermost EH scope, which lives at
/// StartOfData (the pop operations below advance StartOfData).
inline EHScopeStack::iterator EHScopeStack::begin() const {
  return iterator(StartOfData);
}
957
/// Returns the past-the-outermost-scope iterator, at EndOfBuffer.
inline EHScopeStack::iterator EHScopeStack::end() const {
  return iterator(EndOfBuffer);
}
961
962inline void EHScopeStack::popCatch() {
963 assert(!empty() && "popping exception stack when not empty");
964
John McCall777d6e52011-08-11 02:22:43 +0000965 EHCatchScope &scope = cast<EHCatchScope>(*begin());
966 InnermostEHScope = scope.getEnclosingEHScope();
967 StartOfData += EHCatchScope::getSizeForNumHandlers(scope.getNumHandlers());
John McCall36f893c2011-01-28 11:13:47 +0000968}
969
970inline void EHScopeStack::popTerminate() {
971 assert(!empty() && "popping exception stack when not empty");
972
John McCall777d6e52011-08-11 02:22:43 +0000973 EHTerminateScope &scope = cast<EHTerminateScope>(*begin());
974 InnermostEHScope = scope.getEnclosingEHScope();
John McCall36f893c2011-01-28 11:13:47 +0000975 StartOfData += EHTerminateScope::getSize();
John McCall36f893c2011-01-28 11:13:47 +0000976}
977
/// Converts a stable savepoint back into a (non-stable) iterator.
/// A stable_iterator records its position as a byte offset back from
/// EndOfBuffer, so it remains meaningful across pushes and pops of
/// shallower scopes.
inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
  assert(sp.isValid() && "finding invalid savepoint");
  assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
  return iterator(EndOfBuffer - sp.Size);
}
983
/// Converts a non-stable iterator (which must point into the current
/// stack) into a stable_iterator by recording its depth as an offset
/// from EndOfBuffer.
inline EHScopeStack::stable_iterator
EHScopeStack::stabilize(iterator ir) const {
  assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
  return stable_iterator(EndOfBuffer - ir.Ptr);
}
989
John McCall36f893c2011-01-28 11:13:47 +0000990}
991}
992
993#endif