blob: 40a7502973e1b6014a614b289aa465e64dabaf21 [file] [log] [blame]
John McCall36f893c2011-01-28 11:13:47 +00001//===-- CGCleanup.h - Classes for cleanups IR generation --------*- C++ -*-===//
2//
3// The LLVM Compiler Infrastructure
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
9//
10// These classes support the generation of LLVM IR for cleanups.
11//
12//===----------------------------------------------------------------------===//
13
14#ifndef CLANG_CODEGEN_CGCLEANUP_H
15#define CLANG_CODEGEN_CGCLEANUP_H
16
Reid Kleckner6fe91e42013-06-09 16:45:02 +000017#include "clang/Basic/LLVM.h"
18#include "llvm/ADT/SmallPtrSet.h"
19#include "llvm/ADT/SmallVector.h"
Reid Kleckner875e7392013-06-09 16:56:53 +000020#include "llvm/IR/BasicBlock.h"
21#include "llvm/IR/Value.h"
22#include "llvm/IR/Instructions.h"
John McCall36f893c2011-01-28 11:13:47 +000023
24namespace clang {
25namespace CodeGen {
26
Reid Kleckner6fe91e42013-06-09 16:45:02 +000027class CodeGenFunction;
28
/// A branch fixup.  These are required when emitting a goto to a
/// label which hasn't been emitted yet.  The goto is optimistically
/// emitted as a branch to the basic block for the label, and (if it
/// occurs in a scope with non-trivial cleanups) a fixup is added to
/// the innermost cleanup.  When a (normal) cleanup is popped, any
/// unresolved fixups in that scope are threaded through the cleanup.
struct BranchFixup {
  /// The block containing the terminator which needs to be modified
  /// into a switch if this fixup is resolved into the current scope.
  /// If null, InitialBranch points directly to the destination.
  llvm::BasicBlock *OptimisticBranchBlock;

  /// The ultimate destination of the branch.
  ///
  /// This can be set to null to indicate that this fixup was
  /// successfully resolved.
  llvm::BasicBlock *Destination;

  /// The destination index value, used to select Destination in the
  /// switch emitted when the branch is threaded through a cleanup.
  unsigned DestinationIndex;

  /// The initial branch of the fixup, i.e. the optimistically-emitted
  /// terminator that will be rewritten as fixups are resolved.
  llvm::BranchInst *InitialBranch;
};
53
/// Saving/restoring policy for values that are trivially invariant:
/// the saved form is the value itself, and save/restore are no-ops.
/// This is the default behavior used by DominatingValue below.
template <class T> struct InvariantValue {
  typedef T type;
  typedef T saved_type;
  static bool needsSaving(type value) { return false; }
  static saved_type save(CodeGenFunction &CGF, type value) { return value; }
  static type restore(CodeGenFunction &CGF, saved_type value) { return value; }
};
61
/// A metaprogramming class for ensuring that a value will dominate an
/// arbitrary position in a function.  By default values are assumed
/// invariant (see InvariantValue); specializations below and elsewhere
/// spill values that might not dominate (e.g. instructions).
template <class T> struct DominatingValue : InvariantValue<T> {};
65
// Dispatch on whether T* might refer to an llvm::Instruction, which is
// the only Value category that can fail to dominate a later position.
// Constants and basic blocks trivially dominate everything.
// NOTE(review): llvm::is_base_of appears to be LLVM's pre-C++11 type
// trait; confirm against the in-tree LLVM version before changing.
template <class T, bool mightBeInstruction =
            llvm::is_base_of<llvm::Value, T>::value &&
            !llvm::is_base_of<llvm::Constant, T>::value &&
            !llvm::is_base_of<llvm::BasicBlock, T>::value>
struct DominatingPointer;
// Non-instruction pointers are invariant: no saving needed.
template <class T> struct DominatingPointer<T,false> : InvariantValue<T*> {};
// template <class T> struct DominatingPointer<T,true> at end of file

template <class T> struct DominatingValue<T*> : DominatingPointer<T> {};
75
/// Bit-flag kinds describing when a cleanup runs (normal control flow,
/// exceptional control flow, or both) and whether it starts inactive.
enum CleanupKind {
  EHCleanup = 0x1,
  NormalCleanup = 0x2,
  NormalAndEHCleanup = EHCleanup | NormalCleanup,

  // May be OR'd with the kinds above to push a cleanup in the
  // deactivated state; it must be activated explicitly later.
  InactiveCleanup = 0x4,
  InactiveEHCleanup = EHCleanup | InactiveCleanup,
  InactiveNormalCleanup = NormalCleanup | InactiveCleanup,
  InactiveNormalAndEHCleanup = NormalAndEHCleanup | InactiveCleanup
};
86
/// A stack of scopes which respond to exceptions, including cleanups
/// and catch blocks.
///
/// Scopes are allocated contiguously in a manually-managed buffer that
/// grows downward: StartOfData is the innermost (top) scope.
class EHScopeStack {
public:
  /// A saved depth on the scope stack.  This is necessary because
  /// pushing scopes onto the stack invalidates iterators.
  class stable_iterator {
    friend class EHScopeStack;

    /// Offset from StartOfData to EndOfBuffer.  Larger offsets are
    /// therefore *deeper* (closer to the top of the stack).
    ptrdiff_t Size;

    stable_iterator(ptrdiff_t Size) : Size(Size) {}

  public:
    static stable_iterator invalid() { return stable_iterator(-1); }
    stable_iterator() : Size(-1) {}

    bool isValid() const { return Size >= 0; }

    /// Returns true if this scope encloses I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool encloses(stable_iterator I) const { return Size <= I.Size; }

    /// Returns true if this scope strictly encloses I: that is,
    /// if it encloses I and is not I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }

    friend bool operator==(stable_iterator A, stable_iterator B) {
      return A.Size == B.Size;
    }
    friend bool operator!=(stable_iterator A, stable_iterator B) {
      return A.Size != B.Size;
    }
  };

  /// Information for lazily generating a cleanup.  Subclasses must be
  /// POD-like: cleanups will not be destructed, and they will be
  /// allocated on the cleanup stack and freely copied and moved
  /// around.
  ///
  /// Cleanup implementations should generally be declared in an
  /// anonymous namespace.
  class Cleanup {
    // Anchor the construction vtable.
    virtual void anchor();
  public:
    /// Generation flags.
    class Flags {
      enum {
        F_IsForEH = 0x1,
        F_IsNormalCleanupKind = 0x2,
        F_IsEHCleanupKind = 0x4
      };
      unsigned flags;

    public:
      Flags() : flags(0) {}

      /// isForEH - true if the current emission is for an EH cleanup.
      bool isForEHCleanup() const { return flags & F_IsForEH; }
      bool isForNormalCleanup() const { return !isForEHCleanup(); }
      void setIsForEHCleanup() { flags |= F_IsForEH; }

      bool isNormalCleanupKind() const { return flags & F_IsNormalCleanupKind; }
      void setIsNormalCleanupKind() { flags |= F_IsNormalCleanupKind; }

      /// isEHCleanupKind - true if the cleanup was pushed as an EH
      /// cleanup.
      bool isEHCleanupKind() const { return flags & F_IsEHCleanupKind; }
      void setIsEHCleanupKind() { flags |= F_IsEHCleanupKind; }
    };

    // Provide a virtual destructor to suppress a very common warning
    // that unfortunately cannot be suppressed without this.  Cleanups
    // should not rely on this destructor ever being called.
    virtual ~Cleanup() {}

    /// Emit the cleanup.  For normal cleanups, this is run in the
    /// same EH context as when the cleanup was pushed, i.e. the
    /// immediately-enclosing context of the cleanup scope.  For
    /// EH cleanups, this is run in a terminate context.
    ///
    /// \param flags cleanup kind.
    virtual void Emit(CodeGenFunction &CGF, Flags flags) = 0;
  };

  /// ConditionalCleanupN stores the saved form of its N parameters,
  /// then restores them and performs the cleanup.
  template <class T, class A0>
  class ConditionalCleanup1 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    A0_saved a0_saved;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      T(a0).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup1(A0_saved a0)
      : a0_saved(a0) {}
  };

  template <class T, class A0, class A1>
  class ConditionalCleanup2 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    typedef typename DominatingValue<A1>::saved_type A1_saved;
    A0_saved a0_saved;
    A1_saved a1_saved;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
      T(a0, a1).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup2(A0_saved a0, A1_saved a1)
      : a0_saved(a0), a1_saved(a1) {}
  };

  template <class T, class A0, class A1, class A2>
  class ConditionalCleanup3 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    typedef typename DominatingValue<A1>::saved_type A1_saved;
    typedef typename DominatingValue<A2>::saved_type A2_saved;
    A0_saved a0_saved;
    A1_saved a1_saved;
    A2_saved a2_saved;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
      A2 a2 = DominatingValue<A2>::restore(CGF, a2_saved);
      T(a0, a1, a2).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup3(A0_saved a0, A1_saved a1, A2_saved a2)
      : a0_saved(a0), a1_saved(a1), a2_saved(a2) {}
  };

  template <class T, class A0, class A1, class A2, class A3>
  class ConditionalCleanup4 : public Cleanup {
    typedef typename DominatingValue<A0>::saved_type A0_saved;
    typedef typename DominatingValue<A1>::saved_type A1_saved;
    typedef typename DominatingValue<A2>::saved_type A2_saved;
    typedef typename DominatingValue<A3>::saved_type A3_saved;
    A0_saved a0_saved;
    A1_saved a1_saved;
    A2_saved a2_saved;
    A3_saved a3_saved;

    void Emit(CodeGenFunction &CGF, Flags flags) {
      A0 a0 = DominatingValue<A0>::restore(CGF, a0_saved);
      A1 a1 = DominatingValue<A1>::restore(CGF, a1_saved);
      A2 a2 = DominatingValue<A2>::restore(CGF, a2_saved);
      A3 a3 = DominatingValue<A3>::restore(CGF, a3_saved);
      T(a0, a1, a2, a3).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup4(A0_saved a0, A1_saved a1, A2_saved a2, A3_saved a3)
      : a0_saved(a0), a1_saved(a1), a2_saved(a2), a3_saved(a3) {}
  };

private:
  // The implementation for this class is in CGException.h and
  // CGException.cpp; the definition is here because it's used as a
  // member of CodeGenFunction.

  /// The start of the scope-stack buffer, i.e. the allocated pointer
  /// for the buffer.  All of these pointers are either simultaneously
  /// null or simultaneously valid.
  char *StartOfBuffer;

  /// The end of the buffer.
  char *EndOfBuffer;

  /// The first valid entry in the buffer.
  char *StartOfData;

  /// The innermost normal cleanup on the stack.
  stable_iterator InnermostNormalCleanup;

  /// The innermost EH scope on the stack.
  stable_iterator InnermostEHScope;

  /// The current set of branch fixups.  A branch fixup is a jump to
  /// an as-yet unemitted label, i.e. a label for which we don't yet
  /// know the EH stack depth.  Whenever we pop a cleanup, we have
  /// to thread all the current branch fixups through it.
  ///
  /// Fixups are recorded as the Use of the respective branch or
  /// switch statement.  The use points to the final destination.
  /// When popping out of a cleanup, these uses are threaded through
  /// the cleanup and adjusted to point to the new cleanup.
  ///
  /// Note that branches are allowed to jump into protected scopes
  /// in certain situations;  e.g. the following code is legal:
  ///     struct A { ~A(); }; // trivial ctor, non-trivial dtor
  ///     goto foo;
  ///     A a;
  ///    foo:
  ///     bar();
  SmallVector<BranchFixup, 8> BranchFixups;

  /// Grow the buffer (if needed) and carve out Size bytes at the top.
  char *allocate(size_t Size);

  /// Allocate a cleanup scope of the given kind with DataSize bytes of
  /// trailing storage for the Cleanup object; returns the storage.
  void *pushCleanup(CleanupKind K, size_t DataSize);

public:
  EHScopeStack() : StartOfBuffer(0), EndOfBuffer(0), StartOfData(0),
                   InnermostNormalCleanup(stable_end()),
                   InnermostEHScope(stable_end()) {}
  ~EHScopeStack() { delete[] StartOfBuffer; }

  // Variadic templates would make this not terrible.

  /// Push a lazily-created cleanup on the stack.
  template <class T>
  void pushCleanup(CleanupKind Kind) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T();
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0>
  void pushCleanup(CleanupKind Kind, A0 a0) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1, a2);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2, class A3>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class A0, class A1, class A2, class A3, class A4>
  void pushCleanup(CleanupKind Kind, A0 a0, A1 a1, A2 a2, A3 a3, A4 a4) {
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new(Buffer) T(a0, a1, a2, a3, a4);
    (void) Obj;
  }

  // Feel free to add more variants of the following:

  /// Push a cleanup with non-constant storage requirements on the
  /// stack.  The cleanup type must provide an additional static method:
  ///   static size_t getExtraSize(size_t);
  /// The argument to this method will be the value N, which will also
  /// be passed as the first argument to the constructor.
  ///
  /// The data stored in the extra storage must obey the same
  /// restrictions as normal cleanup member data.
  ///
  /// The pointer returned from this method is valid until the cleanup
  /// stack is modified.
  template <class T, class A0, class A1, class A2>
  T *pushCleanupWithExtra(CleanupKind Kind, size_t N, A0 a0, A1 a1, A2 a2) {
    void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
    return new (Buffer) T(N, a0, a1, a2);
  }

  /// Pops a cleanup scope off the stack.  This is private to CGCleanup.cpp.
  void popCleanup();

  /// Push a set of catch handlers on the stack.  The catch is
  /// uninitialized and will need to have the given number of handlers
  /// set on it.
  class EHCatchScope *pushCatch(unsigned NumHandlers);

  /// Pops a catch scope off the stack.  This is private to CGException.cpp.
  void popCatch();

  /// Push an exceptions filter on the stack.
  class EHFilterScope *pushFilter(unsigned NumFilters);

  /// Pops an exceptions filter off the stack.
  void popFilter();

  /// Push a terminate handler on the stack.
  void pushTerminate();

  /// Pops a terminate handler off the stack.
  void popTerminate();

  /// Determines whether the exception-scopes stack is empty.
  bool empty() const { return StartOfData == EndOfBuffer; }

  /// True if any scope on the stack may require a landing pad.
  bool requiresLandingPad() const {
    return InnermostEHScope != stable_end();
  }

  /// Determines whether there are any normal cleanups on the stack.
  bool hasNormalCleanups() const {
    return InnermostNormalCleanup != stable_end();
  }

  /// Returns the innermost normal cleanup on the stack, or
  /// stable_end() if there are no normal cleanups.
  stable_iterator getInnermostNormalCleanup() const {
    return InnermostNormalCleanup;
  }
  stable_iterator getInnermostActiveNormalCleanup() const;

  stable_iterator getInnermostEHScope() const {
    return InnermostEHScope;
  }

  stable_iterator getInnermostActiveEHScope() const;

  /// An unstable reference to a scope-stack depth.  Invalidated by
  /// pushes but not pops.
  class iterator;

  /// Returns an iterator pointing to the innermost EH scope.
  iterator begin() const;

  /// Returns an iterator pointing to the outermost EH scope.
  iterator end() const;

  /// Create a stable reference to the top of the EH stack.  The
  /// returned reference is valid until that scope is popped off the
  /// stack.
  stable_iterator stable_begin() const {
    return stable_iterator(EndOfBuffer - StartOfData);
  }

  /// Create a stable reference to the bottom of the EH stack.
  static stable_iterator stable_end() {
    return stable_iterator(0);
  }

  /// Translates an iterator into a stable_iterator.
  stable_iterator stabilize(iterator it) const;

  /// Turn a stable reference to a scope depth into a unstable pointer
  /// to the EH stack.
  iterator find(stable_iterator save) const;

  /// Removes the cleanup pointed to by the given stable_iterator.
  void removeCleanup(stable_iterator save);

  /// Add a branch fixup to the current cleanup scope.
  BranchFixup &addBranchFixup() {
    assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
    BranchFixups.push_back(BranchFixup());
    return BranchFixups.back();
  }

  unsigned getNumBranchFixups() const { return BranchFixups.size(); }
  BranchFixup &getBranchFixup(unsigned I) {
    assert(I < getNumBranchFixups());
    return BranchFixups[I];
  }

  /// Pops lazily-removed fixups from the end of the list.  This
  /// should only be called by procedures which have just popped a
  /// cleanup or resolved one or more fixups.
  void popNullFixups();

  /// Clears the branch-fixups list.  This should only be called by
  /// ResolveAllBranchFixups.
  void clearFixups() { BranchFixups.clear(); }
};
479
/// A protected scope for zero-cost EH handling.
///
/// Base class of all scopes on the EHScopeStack.  The Kind tag and the
/// per-subclass counters are packed into a union of bitfield classes so
/// that every scope header fits in the same word.
class EHScope {
  llvm::BasicBlock *CachedLandingPad;
  llvm::BasicBlock *CachedEHDispatchBlock;

  EHScopeStack::stable_iterator EnclosingEHScope;

  class CommonBitFields {
    friend class EHScope;
    unsigned Kind : 2;
  };
  enum { NumCommonBits = 2 };

protected:
  class CatchBitFields {
    friend class EHCatchScope;
    unsigned : NumCommonBits;

    unsigned NumHandlers : 32 - NumCommonBits;
  };

  class CleanupBitFields {
    friend class EHCleanupScope;
    unsigned : NumCommonBits;

    /// Whether this cleanup needs to be run along normal edges.
    unsigned IsNormalCleanup : 1;

    /// Whether this cleanup needs to be run along exception edges.
    unsigned IsEHCleanup : 1;

    /// Whether this cleanup is currently active.
    unsigned IsActive : 1;

    /// Whether the normal cleanup should test the activation flag.
    unsigned TestFlagInNormalCleanup : 1;

    /// Whether the EH cleanup should test the activation flag.
    unsigned TestFlagInEHCleanup : 1;

    /// The amount of extra storage needed by the Cleanup.
    /// Always a multiple of the scope-stack alignment.
    unsigned CleanupSize : 12;

    /// The number of fixups required by enclosing scopes (not including
    /// this one).  If this is the top cleanup scope, all the fixups
    /// from this index onwards belong to this scope.
    unsigned FixupDepth : 32 - 17 - NumCommonBits; // currently 13
  };

  class FilterBitFields {
    friend class EHFilterScope;
    unsigned : NumCommonBits;

    unsigned NumFilters : 32 - NumCommonBits;
  };

  // All members share the low NumCommonBits (the Kind); each subclass
  // reads only its own view of the remaining bits.
  union {
    CommonBitFields CommonBits;
    CatchBitFields CatchBits;
    CleanupBitFields CleanupBits;
    FilterBitFields FilterBits;
  };

public:
  enum Kind { Cleanup, Catch, Terminate, Filter };

  EHScope(Kind kind, EHScopeStack::stable_iterator enclosingEHScope)
    : CachedLandingPad(0), CachedEHDispatchBlock(0),
      EnclosingEHScope(enclosingEHScope) {
    CommonBits.Kind = kind;
  }

  Kind getKind() const { return static_cast<Kind>(CommonBits.Kind); }

  llvm::BasicBlock *getCachedLandingPad() const {
    return CachedLandingPad;
  }

  void setCachedLandingPad(llvm::BasicBlock *block) {
    CachedLandingPad = block;
  }

  llvm::BasicBlock *getCachedEHDispatchBlock() const {
    return CachedEHDispatchBlock;
  }

  void setCachedEHDispatchBlock(llvm::BasicBlock *block) {
    CachedEHDispatchBlock = block;
  }

  /// True if anything has branched to this scope's EH dispatch block.
  bool hasEHBranches() const {
    if (llvm::BasicBlock *block = getCachedEHDispatchBlock())
      return !block->use_empty();
    return false;
  }

  EHScopeStack::stable_iterator getEnclosingEHScope() const {
    return EnclosingEHScope;
  }
};
581
/// A scope which attempts to handle some, possibly all, types of
/// exceptions.
///
/// Objective C \@finally blocks are represented using a cleanup scope
/// after the catch scope.
class EHCatchScope : public EHScope {
  // In effect, we have a flexible array member
  //   Handler Handlers[0];
  // But that's only standard in C99, not C++, so we have to do
  // annoying pointer arithmetic instead.

public:
  struct Handler {
    /// A type info value, or null (C++ null, not an LLVM null pointer)
    /// for a catch-all.
    llvm::Value *Type;

    /// The catch handler for this type.
    llvm::BasicBlock *Block;

    bool isCatchAll() const { return Type == 0; }
  };

private:
  friend class EHScopeStack;

  // The handler array lives immediately after this object; the stack
  // allocates the extra space via getSizeForNumHandlers().
  Handler *getHandlers() {
    return reinterpret_cast<Handler*>(this+1);
  }

  const Handler *getHandlers() const {
    return reinterpret_cast<const Handler*>(this+1);
  }

public:
  static size_t getSizeForNumHandlers(unsigned N) {
    return sizeof(EHCatchScope) + N * sizeof(Handler);
  }

  EHCatchScope(unsigned numHandlers,
               EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Catch, enclosingEHScope) {
    CatchBits.NumHandlers = numHandlers;
  }

  unsigned getNumHandlers() const {
    return CatchBits.NumHandlers;
  }

  void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
    setHandler(I, /*catchall*/ 0, Block);
  }

  void setHandler(unsigned I, llvm::Value *Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = Type;
    getHandlers()[I].Block = Block;
  }

  const Handler &getHandler(unsigned I) const {
    assert(I < getNumHandlers());
    return getHandlers()[I];
  }

  typedef const Handler *iterator;
  iterator begin() const { return getHandlers(); }
  iterator end() const { return getHandlers() + getNumHandlers(); }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Catch;
  }
};
654
/// A cleanup scope which generates the cleanup blocks lazily.
class EHCleanupScope : public EHScope {
  /// The nearest normal cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingNormal;

  /// The nearest EH scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingEH;

  /// The dual entry/exit block along the normal edge.  This is lazily
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *NormalBlock;

  /// An optional i1 variable indicating whether this cleanup has been
  /// activated yet.
  llvm::AllocaInst *ActiveFlag;

  /// Extra information required for cleanups that have resolved
  /// branches through them.  This has to be allocated on the side
  /// because everything on the cleanup stack has to be trivially
  /// movable.
  struct ExtInfo {
    /// The destinations of normal branch-afters and branch-throughs.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;

    /// Normal branch-afters.
    SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      BranchAfters;
  };
  // mutable so the const getExtInfo() overload can allocate lazily.
  mutable struct ExtInfo *ExtInfo;

  struct ExtInfo &getExtInfo() {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

  const struct ExtInfo &getExtInfo() const {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

public:
  /// Gets the size required for a lazy cleanup scope with the given
  /// cleanup-data requirements.
  static size_t getSizeForCleanupSize(size_t Size) {
    return sizeof(EHCleanupScope) + Size;
  }

  size_t getAllocatedSize() const {
    return sizeof(EHCleanupScope) + CleanupBits.CleanupSize;
  }

  EHCleanupScope(bool isNormal, bool isEH, bool isActive,
                 unsigned cleanupSize, unsigned fixupDepth,
                 EHScopeStack::stable_iterator enclosingNormal,
                 EHScopeStack::stable_iterator enclosingEH)
    : EHScope(EHScope::Cleanup, enclosingEH), EnclosingNormal(enclosingNormal),
      NormalBlock(0), ActiveFlag(0), ExtInfo(0) {
    CleanupBits.IsNormalCleanup = isNormal;
    CleanupBits.IsEHCleanup = isEH;
    CleanupBits.IsActive = isActive;
    CleanupBits.TestFlagInNormalCleanup = false;
    CleanupBits.TestFlagInEHCleanup = false;
    CleanupBits.CleanupSize = cleanupSize;
    CleanupBits.FixupDepth = fixupDepth;

    // CleanupSize is a narrow bitfield; make sure nothing was truncated.
    assert(CleanupBits.CleanupSize == cleanupSize && "cleanup size overflow");
  }

  ~EHCleanupScope() {
    delete ExtInfo;
  }

  bool isNormalCleanup() const { return CleanupBits.IsNormalCleanup; }
  llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
  void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }

  bool isEHCleanup() const { return CleanupBits.IsEHCleanup; }
  llvm::BasicBlock *getEHBlock() const { return getCachedEHDispatchBlock(); }
  void setEHBlock(llvm::BasicBlock *BB) { setCachedEHDispatchBlock(BB); }

  bool isActive() const { return CleanupBits.IsActive; }
  void setActive(bool A) { CleanupBits.IsActive = A; }

  llvm::AllocaInst *getActiveFlag() const { return ActiveFlag; }
  void setActiveFlag(llvm::AllocaInst *Var) { ActiveFlag = Var; }

  void setTestFlagInNormalCleanup() {
    CleanupBits.TestFlagInNormalCleanup = true;
  }
  bool shouldTestFlagInNormalCleanup() const {
    return CleanupBits.TestFlagInNormalCleanup;
  }

  void setTestFlagInEHCleanup() {
    CleanupBits.TestFlagInEHCleanup = true;
  }
  bool shouldTestFlagInEHCleanup() const {
    return CleanupBits.TestFlagInEHCleanup;
  }

  unsigned getFixupDepth() const { return CleanupBits.FixupDepth; }
  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
    return EnclosingNormal;
  }

  size_t getCleanupSize() const { return CleanupBits.CleanupSize; }
  // The Cleanup object is stored immediately after this scope header.
  void *getCleanupBuffer() { return this + 1; }

  EHScopeStack::Cleanup *getCleanup() {
    return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
  }

  /// True if this cleanup scope has any branch-afters or branch-throughs.
  bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); }

  /// Add a branch-after to this cleanup scope.  A branch-after is a
  /// branch from a point protected by this (normal) cleanup to a
  /// point in the normal cleanup scope immediately containing it.
  /// For example,
  ///   for (;;) { A a; break; }
  /// contains a branch-after.
  ///
  /// Branch-afters each have their own destination out of the
  /// cleanup, guaranteed distinct from anything else threaded through
  /// it.  Therefore branch-afters usually force a switch after the
  /// cleanup.
  void addBranchAfter(llvm::ConstantInt *Index,
                      llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.Branches.insert(Block))
      ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index));
  }

  /// Return the number of unique branch-afters on this scope.
  unsigned getNumBranchAfters() const {
    return ExtInfo ? ExtInfo->BranchAfters.size() : 0;
  }

  llvm::BasicBlock *getBranchAfterBlock(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].first;
  }

  llvm::ConstantInt *getBranchAfterIndex(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].second;
  }

  /// Add a branch-through to this cleanup scope.  A branch-through is
  /// a branch from a scope protected by this (normal) cleanup to an
  /// enclosing scope other than the immediately-enclosing normal
  /// cleanup scope.
  ///
  /// In the following example, the branch through B's scope is a
  /// branch-through, while the branch through A's scope is a
  /// branch-after:
  ///   for (;;) { A a; B b; break; }
  ///
  /// All branch-throughs have a common destination out of the
  /// cleanup, one possibly shared with the fall-through.  Therefore
  /// branch-throughs usually don't force a switch after the cleanup.
  ///
  /// \return true if the branch-through was new to this scope
  bool addBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().Branches.insert(Block);
  }

  /// Determines if this cleanup scope has any branch throughs.
  bool hasBranchThroughs() const {
    if (!ExtInfo) return false;
    // Branches holds both kinds; any entry that is not a branch-after
    // must be a branch-through.
    return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size());
  }

  static bool classof(const EHScope *Scope) {
    return (Scope->getKind() == Cleanup);
  }
};
832
/// An exceptions scope which filters exceptions thrown through it.
/// Only exceptions matching the filter types will be permitted to be
/// thrown.
///
/// This is used to implement C++ exception specifications.
class EHFilterScope : public EHScope {
  // Essentially ends in a flexible array member:
  // llvm::Value *FilterTypes[0];

  // The filter array lives immediately after this object; the stack
  // allocates the extra space via getSizeForNumFilters().
  llvm::Value **getFilters() {
    return reinterpret_cast<llvm::Value**>(this+1);
  }

  llvm::Value * const *getFilters() const {
    return reinterpret_cast<llvm::Value* const *>(this+1);
  }

public:
  // Filter scopes have no enclosing EH scope, hence stable_end().
  EHFilterScope(unsigned numFilters)
    : EHScope(Filter, EHScopeStack::stable_end()) {
    FilterBits.NumFilters = numFilters;
  }

  static size_t getSizeForNumFilters(unsigned numFilters) {
    return sizeof(EHFilterScope) + numFilters * sizeof(llvm::Value*);
  }

  unsigned getNumFilters() const { return FilterBits.NumFilters; }

  void setFilter(unsigned i, llvm::Value *filterValue) {
    assert(i < getNumFilters());
    getFilters()[i] = filterValue;
  }

  llvm::Value *getFilter(unsigned i) const {
    assert(i < getNumFilters());
    return getFilters()[i];
  }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Filter;
  }
};
876
877/// An exceptions scope which calls std::terminate if any exception
878/// reaches it.
879class EHTerminateScope : public EHScope {
John McCall36f893c2011-01-28 11:13:47 +0000880public:
John McCall777d6e52011-08-11 02:22:43 +0000881 EHTerminateScope(EHScopeStack::stable_iterator enclosingEHScope)
882 : EHScope(Terminate, enclosingEHScope) {}
John McCall36f893c2011-01-28 11:13:47 +0000883 static size_t getSize() { return sizeof(EHTerminateScope); }
884
John McCall777d6e52011-08-11 02:22:43 +0000885 static bool classof(const EHScope *scope) {
886 return scope->getKind() == Terminate;
John McCall36f893c2011-01-28 11:13:47 +0000887 }
888};
889
890/// A non-stable pointer into the scope stack.
891class EHScopeStack::iterator {
892 char *Ptr;
893
894 friend class EHScopeStack;
895 explicit iterator(char *Ptr) : Ptr(Ptr) {}
896
897public:
898 iterator() : Ptr(0) {}
899
900 EHScope *get() const {
901 return reinterpret_cast<EHScope*>(Ptr);
902 }
903
904 EHScope *operator->() const { return get(); }
905 EHScope &operator*() const { return *get(); }
906
907 iterator &operator++() {
908 switch (get()->getKind()) {
909 case EHScope::Catch:
910 Ptr += EHCatchScope::getSizeForNumHandlers(
911 static_cast<const EHCatchScope*>(get())->getNumHandlers());
912 break;
913
914 case EHScope::Filter:
915 Ptr += EHFilterScope::getSizeForNumFilters(
916 static_cast<const EHFilterScope*>(get())->getNumFilters());
917 break;
918
919 case EHScope::Cleanup:
920 Ptr += static_cast<const EHCleanupScope*>(get())
921 ->getAllocatedSize();
922 break;
923
924 case EHScope::Terminate:
925 Ptr += EHTerminateScope::getSize();
926 break;
927 }
928
929 return *this;
930 }
931
932 iterator next() {
933 iterator copy = *this;
934 ++copy;
935 return copy;
936 }
937
938 iterator operator++(int) {
939 iterator copy = *this;
940 operator++();
941 return copy;
942 }
943
944 bool encloses(iterator other) const { return Ptr >= other.Ptr; }
945 bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }
946
947 bool operator==(iterator other) const { return Ptr == other.Ptr; }
948 bool operator!=(iterator other) const { return Ptr != other.Ptr; }
949};
950
951inline EHScopeStack::iterator EHScopeStack::begin() const {
952 return iterator(StartOfData);
953}
954
955inline EHScopeStack::iterator EHScopeStack::end() const {
956 return iterator(EndOfBuffer);
957}
958
959inline void EHScopeStack::popCatch() {
960 assert(!empty() && "popping exception stack when not empty");
961
John McCall777d6e52011-08-11 02:22:43 +0000962 EHCatchScope &scope = cast<EHCatchScope>(*begin());
963 InnermostEHScope = scope.getEnclosingEHScope();
964 StartOfData += EHCatchScope::getSizeForNumHandlers(scope.getNumHandlers());
John McCall36f893c2011-01-28 11:13:47 +0000965}
966
967inline void EHScopeStack::popTerminate() {
968 assert(!empty() && "popping exception stack when not empty");
969
John McCall777d6e52011-08-11 02:22:43 +0000970 EHTerminateScope &scope = cast<EHTerminateScope>(*begin());
971 InnermostEHScope = scope.getEnclosingEHScope();
John McCall36f893c2011-01-28 11:13:47 +0000972 StartOfData += EHTerminateScope::getSize();
John McCall36f893c2011-01-28 11:13:47 +0000973}
974
975inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
976 assert(sp.isValid() && "finding invalid savepoint");
977 assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
978 return iterator(EndOfBuffer - sp.Size);
979}
980
981inline EHScopeStack::stable_iterator
982EHScopeStack::stabilize(iterator ir) const {
983 assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
984 return stable_iterator(EndOfBuffer - ir.Ptr);
985}
986
John McCall36f893c2011-01-28 11:13:47 +0000987}
988}
989
990#endif