//===-- CGCleanup.h - Classes for cleanups IR generation --------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// These classes support the generation of LLVM IR for cleanups.
//
//===----------------------------------------------------------------------===//

#ifndef CLANG_CODEGEN_CGCLEANUP_H
#define CLANG_CODEGEN_CGCLEANUP_H

/// EHScopeStack is defined in CodeGenFunction.h, but its
/// implementation is in this file and in CGCleanup.cpp.
#include "CodeGenFunction.h"

namespace llvm {
  class Value;
  class BasicBlock;
}

namespace clang {
namespace CodeGen {

/// A protected scope for zero-cost EH handling.
class EHScope {
  llvm::BasicBlock *CachedLandingPad;

  unsigned K : 2;

protected:
  enum { BitsRemaining = 30 };

public:
  enum Kind { Cleanup, Catch, Terminate, Filter };

  EHScope(Kind K) : CachedLandingPad(0), K(K) {}

  Kind getKind() const { return static_cast<Kind>(K); }

  llvm::BasicBlock *getCachedLandingPad() const {
    return CachedLandingPad;
  }

  void setCachedLandingPad(llvm::BasicBlock *Block) {
    CachedLandingPad = Block;
  }
};

/// A scope which attempts to handle some, possibly all, types of
/// exceptions.
///
/// Objective C @finally blocks are represented using a cleanup scope
/// after the catch scope.
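///
/// For example (an illustrative sketch only): a body of the form
///   try { ... } catch (A &a) { ... } catch (...) { ... }
/// would be represented by an EHCatchScope with two handlers, the
/// second being a catch-all handler with a null type info value.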
class EHCatchScope : public EHScope {
  unsigned NumHandlers : BitsRemaining;

  // In effect, we have a flexible array member
  //   Handler Handlers[0];
  // But that's only standard in C99, not C++, so we have to do
  // annoying pointer arithmetic instead.
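  //
  // Illustrative layout (an informal sketch, implied by
  // getSizeForNumHandlers and getHandlers below), for NumHandlers == 2:
  //   [ EHCatchScope ][ Handler 0 ][ Handler 1 ]
  // i.e. the handler array starts one EHCatchScope object past 'this'.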

public:
  struct Handler {
    /// A type info value, or null (C++ null, not an LLVM null pointer)
    /// for a catch-all.
    llvm::Value *Type;

    /// The catch handler for this type.
    llvm::BasicBlock *Block;

    /// The unwind destination index for this handler.
    unsigned Index;
  };

private:
  friend class EHScopeStack;

  Handler *getHandlers() {
    return reinterpret_cast<Handler*>(this+1);
  }

  const Handler *getHandlers() const {
    return reinterpret_cast<const Handler*>(this+1);
  }

public:
  static size_t getSizeForNumHandlers(unsigned N) {
    return sizeof(EHCatchScope) + N * sizeof(Handler);
  }

  EHCatchScope(unsigned NumHandlers)
    : EHScope(Catch), NumHandlers(NumHandlers) {
  }

  unsigned getNumHandlers() const {
    return NumHandlers;
  }

  void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
    setHandler(I, /*catchall*/ 0, Block);
  }

  void setHandler(unsigned I, llvm::Value *Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = Type;
    getHandlers()[I].Block = Block;
  }

  const Handler &getHandler(unsigned I) const {
    assert(I < getNumHandlers());
    return getHandlers()[I];
  }

  typedef const Handler *iterator;
  iterator begin() const { return getHandlers(); }
  iterator end() const { return getHandlers() + getNumHandlers(); }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Catch;
  }
};

/// A cleanup scope which generates the cleanup blocks lazily.
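///
/// For example (a sketch, not normative): a block such as
///   { A a; ... }
/// where A has a non-trivial destructor would push an EHCleanupScope
/// whose attached Cleanup emits the call to ~A() when the scope is
/// popped, along the normal exit path and, if needed, along the
/// exception path as well.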
class EHCleanupScope : public EHScope {
  /// Whether this cleanup needs to be run along normal edges.
  bool IsNormalCleanup : 1;

  /// Whether this cleanup needs to be run along exception edges.
  bool IsEHCleanup : 1;

  /// Whether this cleanup is currently active.
  bool IsActive : 1;

  /// Whether the normal cleanup should test the activation flag.
  bool TestFlagInNormalCleanup : 1;

  /// Whether the EH cleanup should test the activation flag.
  bool TestFlagInEHCleanup : 1;

  /// The amount of extra storage needed by the Cleanup.
  /// Always a multiple of the scope-stack alignment.
  unsigned CleanupSize : 12;

  /// The number of fixups required by enclosing scopes (not including
  /// this one). If this is the top cleanup scope, all the fixups
  /// from this index onwards belong to this scope.
  unsigned FixupDepth : BitsRemaining - 17; // currently 13

  /// The nearest normal cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingNormal;

  /// The nearest EH cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingEH;

  /// The dual entry/exit block along the normal edge. This is lazily
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *NormalBlock;

  /// The dual entry/exit block along the EH edge. This is lazily
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *EHBlock;

  /// An optional i1 variable indicating whether this cleanup has been
  /// activated yet.
  llvm::AllocaInst *ActiveFlag;

  /// Extra information required for cleanups that have resolved
  /// branches through them. This has to be allocated on the side
  /// because everything on the cleanup stack has to be trivially
  /// movable.
  struct ExtInfo {
    /// The destinations of normal branch-afters and branch-throughs.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;

    /// Normal branch-afters.
    llvm::SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      BranchAfters;

    /// The destinations of EH branch-afters and branch-throughs.
    /// TODO: optimize for the extremely common case of a single
    /// branch-through.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> EHBranches;

    /// EH branch-afters.
    llvm::SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      EHBranchAfters;
  };
  mutable struct ExtInfo *ExtInfo;

  struct ExtInfo &getExtInfo() {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

  const struct ExtInfo &getExtInfo() const {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

public:
  /// Gets the size required for a lazy cleanup scope with the given
  /// cleanup-data requirements.
  static size_t getSizeForCleanupSize(size_t Size) {
    return sizeof(EHCleanupScope) + Size;
  }

  size_t getAllocatedSize() const {
    return sizeof(EHCleanupScope) + CleanupSize;
  }

  EHCleanupScope(bool IsNormal, bool IsEH, bool IsActive,
                 unsigned CleanupSize, unsigned FixupDepth,
                 EHScopeStack::stable_iterator EnclosingNormal,
                 EHScopeStack::stable_iterator EnclosingEH)
    : EHScope(EHScope::Cleanup),
      IsNormalCleanup(IsNormal), IsEHCleanup(IsEH), IsActive(IsActive),
      TestFlagInNormalCleanup(false), TestFlagInEHCleanup(false),
      CleanupSize(CleanupSize), FixupDepth(FixupDepth),
      EnclosingNormal(EnclosingNormal), EnclosingEH(EnclosingEH),
      NormalBlock(0), EHBlock(0), ActiveFlag(0), ExtInfo(0)
  {
    assert(this->CleanupSize == CleanupSize && "cleanup size overflow");
  }

  ~EHCleanupScope() {
    delete ExtInfo;
  }

  bool isNormalCleanup() const { return IsNormalCleanup; }
  llvm::BasicBlock *getNormalBlock() const { return NormalBlock; }
  void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; }

  bool isEHCleanup() const { return IsEHCleanup; }
  llvm::BasicBlock *getEHBlock() const { return EHBlock; }
  void setEHBlock(llvm::BasicBlock *BB) { EHBlock = BB; }

  bool isActive() const { return IsActive; }
  void setActive(bool A) { IsActive = A; }

  llvm::AllocaInst *getActiveFlag() const { return ActiveFlag; }
  void setActiveFlag(llvm::AllocaInst *Var) { ActiveFlag = Var; }

  void setTestFlagInNormalCleanup() { TestFlagInNormalCleanup = true; }
  bool shouldTestFlagInNormalCleanup() const { return TestFlagInNormalCleanup; }

  void setTestFlagInEHCleanup() { TestFlagInEHCleanup = true; }
  bool shouldTestFlagInEHCleanup() const { return TestFlagInEHCleanup; }

  unsigned getFixupDepth() const { return FixupDepth; }
  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
    return EnclosingNormal;
  }
  EHScopeStack::stable_iterator getEnclosingEHCleanup() const {
    return EnclosingEH;
  }

  size_t getCleanupSize() const { return CleanupSize; }
  void *getCleanupBuffer() { return this + 1; }

  EHScopeStack::Cleanup *getCleanup() {
    return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer());
  }

  /// True if this cleanup scope has any branch-afters or branch-throughs.
  bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); }

  /// Add a branch-after to this cleanup scope. A branch-after is a
  /// branch from a point protected by this (normal) cleanup to a
  /// point in the normal cleanup scope immediately containing it.
  /// For example,
  ///   for (;;) { A a; break; }
  /// contains a branch-after.
  ///
  /// Branch-afters each have their own destination out of the
  /// cleanup, guaranteed distinct from anything else threaded through
  /// it. Therefore branch-afters usually force a switch after the
  /// cleanup.
  void addBranchAfter(llvm::ConstantInt *Index,
                      llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.Branches.insert(Block))
      ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index));
  }

  /// Return the number of unique branch-afters on this scope.
  unsigned getNumBranchAfters() const {
    return ExtInfo ? ExtInfo->BranchAfters.size() : 0;
  }

  llvm::BasicBlock *getBranchAfterBlock(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].first;
  }

  llvm::ConstantInt *getBranchAfterIndex(unsigned I) const {
    assert(I < getNumBranchAfters());
    return ExtInfo->BranchAfters[I].second;
  }

  /// Add a branch-through to this cleanup scope. A branch-through is
  /// a branch from a scope protected by this (normal) cleanup to an
  /// enclosing scope other than the immediately-enclosing normal
  /// cleanup scope.
  ///
  /// In the following example, the branch through B's scope is a
  /// branch-through, while the branch through A's scope is a
  /// branch-after:
  ///   for (;;) { A a; B b; break; }
  ///
  /// All branch-throughs have a common destination out of the
  /// cleanup, one possibly shared with the fall-through. Therefore
  /// branch-throughs usually don't force a switch after the cleanup.
  ///
  /// \return true if the branch-through was new to this scope
  bool addBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().Branches.insert(Block);
  }

  /// Determines if this cleanup scope has any branch throughs.
  bool hasBranchThroughs() const {
    if (!ExtInfo) return false;
    return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size());
  }

  // Same stuff, only for EH branches instead of normal branches.
  // It's quite possible that we could find a better representation
  // for this.

  bool hasEHBranches() const { return ExtInfo && !ExtInfo->EHBranches.empty(); }
  void addEHBranchAfter(llvm::ConstantInt *Index,
                        llvm::BasicBlock *Block) {
    struct ExtInfo &ExtInfo = getExtInfo();
    if (ExtInfo.EHBranches.insert(Block))
      ExtInfo.EHBranchAfters.push_back(std::make_pair(Block, Index));
  }

  unsigned getNumEHBranchAfters() const {
    return ExtInfo ? ExtInfo->EHBranchAfters.size() : 0;
  }

  llvm::BasicBlock *getEHBranchAfterBlock(unsigned I) const {
    assert(I < getNumEHBranchAfters());
    return ExtInfo->EHBranchAfters[I].first;
  }

  llvm::ConstantInt *getEHBranchAfterIndex(unsigned I) const {
    assert(I < getNumEHBranchAfters());
    return ExtInfo->EHBranchAfters[I].second;
  }

  bool addEHBranchThrough(llvm::BasicBlock *Block) {
    return getExtInfo().EHBranches.insert(Block);
  }

  bool hasEHBranchThroughs() const {
    if (!ExtInfo) return false;
    return (ExtInfo->EHBranchAfters.size() != ExtInfo->EHBranches.size());
  }

  static bool classof(const EHScope *Scope) {
    return (Scope->getKind() == Cleanup);
  }
};

/// An exceptions scope which filters exceptions thrown through it.
/// Only exceptions matching the filter types will be permitted to be
/// thrown.
///
/// This is used to implement C++ exception specifications.
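///
/// For example (a sketch): a declaration such as
///   void f() throw(A, B);
/// would be expected to push a filter scope with two filter type
/// values; an exception of any other type unwinding through f() is
/// rejected by the filter and ultimately reaches std::unexpected().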
class EHFilterScope : public EHScope {
  unsigned NumFilters : BitsRemaining;

  // Essentially ends in a flexible array member:
  //   llvm::Value *FilterTypes[0];

  llvm::Value **getFilters() {
    return reinterpret_cast<llvm::Value**>(this+1);
  }

  llvm::Value * const *getFilters() const {
    return reinterpret_cast<llvm::Value* const *>(this+1);
  }

public:
  EHFilterScope(unsigned NumFilters) :
    EHScope(Filter), NumFilters(NumFilters) {}

  static size_t getSizeForNumFilters(unsigned NumFilters) {
    return sizeof(EHFilterScope) + NumFilters * sizeof(llvm::Value*);
  }

  unsigned getNumFilters() const { return NumFilters; }

  void setFilter(unsigned I, llvm::Value *FilterValue) {
    assert(I < getNumFilters());
    getFilters()[I] = FilterValue;
  }

  llvm::Value *getFilter(unsigned I) const {
    assert(I < getNumFilters());
    return getFilters()[I];
  }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Filter;
  }
};

/// An exceptions scope which calls std::terminate if any exception
/// reaches it.
class EHTerminateScope : public EHScope {
  unsigned DestIndex : BitsRemaining;
public:
  EHTerminateScope(unsigned Index) : EHScope(Terminate), DestIndex(Index) {}
  static size_t getSize() { return sizeof(EHTerminateScope); }

  unsigned getDestIndex() const { return DestIndex; }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Terminate;
  }
};

/// A non-stable pointer into the scope stack.
class EHScopeStack::iterator {
  char *Ptr;

  friend class EHScopeStack;
  explicit iterator(char *Ptr) : Ptr(Ptr) {}

public:
  iterator() : Ptr(0) {}

  EHScope *get() const {
    return reinterpret_cast<EHScope*>(Ptr);
  }

  EHScope *operator->() const { return get(); }
  EHScope &operator*() const { return *get(); }

  iterator &operator++() {
    switch (get()->getKind()) {
    case EHScope::Catch:
      Ptr += EHCatchScope::getSizeForNumHandlers(
          static_cast<const EHCatchScope*>(get())->getNumHandlers());
      break;

    case EHScope::Filter:
      Ptr += EHFilterScope::getSizeForNumFilters(
          static_cast<const EHFilterScope*>(get())->getNumFilters());
      break;

    case EHScope::Cleanup:
      Ptr += static_cast<const EHCleanupScope*>(get())
        ->getAllocatedSize();
      break;

    case EHScope::Terminate:
      Ptr += EHTerminateScope::getSize();
      break;
    }

    return *this;
  }

  iterator next() {
    iterator copy = *this;
    ++copy;
    return copy;
  }

  iterator operator++(int) {
    iterator copy = *this;
    operator++();
    return copy;
  }

  bool encloses(iterator other) const { return Ptr >= other.Ptr; }
  bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }

  bool operator==(iterator other) const { return Ptr == other.Ptr; }
  bool operator!=(iterator other) const { return Ptr != other.Ptr; }
};

inline EHScopeStack::iterator EHScopeStack::begin() const {
  return iterator(StartOfData);
}

inline EHScopeStack::iterator EHScopeStack::end() const {
  return iterator(EndOfBuffer);
}
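
// A minimal usage sketch (hypothetical, not part of this interface):
// walking the scope stack from innermost to outermost scope, where
// 'EHStack' is assumed to be a CodeGenFunction's EHScopeStack member.
//
//   for (EHScopeStack::iterator I = EHStack.begin(), E = EHStack.end();
//        I != E; ++I) {
//     if (EHCleanupScope *Cleanup = dyn_cast<EHCleanupScope>(&*I)) {
//       // e.g. inspect Cleanup->isActive(), Cleanup->getCleanup(), ...
//     }
//   }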

inline void EHScopeStack::popCatch() {
  assert(!empty() && "popping exception stack when not empty");

  assert(isa<EHCatchScope>(*begin()));
  StartOfData += EHCatchScope::getSizeForNumHandlers(
                   cast<EHCatchScope>(*begin()).getNumHandlers());

  if (empty()) NextEHDestIndex = FirstEHDestIndex;

  assert(CatchDepth > 0 && "mismatched catch/terminate push/pop");
  CatchDepth--;
}

inline void EHScopeStack::popTerminate() {
  assert(!empty() && "popping exception stack when not empty");

  assert(isa<EHTerminateScope>(*begin()));
  StartOfData += EHTerminateScope::getSize();

  if (empty()) NextEHDestIndex = FirstEHDestIndex;

  assert(CatchDepth > 0 && "mismatched catch/terminate push/pop");
  CatchDepth--;
}

inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
  assert(sp.isValid() && "finding invalid savepoint");
  assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
  return iterator(EndOfBuffer - sp.Size);
}

inline EHScopeStack::stable_iterator
EHScopeStack::stabilize(iterator ir) const {
  assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
  return stable_iterator(EndOfBuffer - ir.Ptr);
}

inline EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
  for (EHScopeStack::stable_iterator
         I = getInnermostNormalCleanup(), E = stable_end(); I != E; ) {
    EHCleanupScope &S = cast<EHCleanupScope>(*find(I));
    if (S.isActive()) return I;
    I = S.getEnclosingNormalCleanup();
  }
  return stable_end();
}

inline EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveEHCleanup() const {
  for (EHScopeStack::stable_iterator
         I = getInnermostEHCleanup(), E = stable_end(); I != E; ) {
    EHCleanupScope &S = cast<EHCleanupScope>(*find(I));
    if (S.isActive()) return I;
    I = S.getEnclosingEHCleanup();
  }
  return stable_end();
}

}
}

#endif