diff options
author | John McCall <rjmccall@apple.com> | 2010-07-13 20:32:21 +0000 |
---|---|---|
committer | John McCall <rjmccall@apple.com> | 2010-07-13 20:32:21 +0000 |
commit | 2b7fc3828e2245604a9b518167418d4bf650d5f2 (patch) | |
tree | 4c139957fc2a59adadc0315e4f5dead1e551c2ac /clang/lib/CodeGen/CGException.h | |
parent | caca5488dc290c3cbca702a90e3f7a8be7b23c06 (diff) | |
download | bcm5719-llvm-2b7fc3828e2245604a9b518167418d4bf650d5f2.tar.gz bcm5719-llvm-2b7fc3828e2245604a9b518167418d4bf650d5f2.zip |
Teach IR generation how to lazily emit cleanups. This has a lot of advantages,
mostly in avoiding unnecessary work at compile time but also in producing more
sensible block orderings.
Move the destructor cleanups for local variables over to use lazy cleanups.
Eventually all cleanups will do this; for now we have some awkward code
duplication.
Tell IR generation just to never produce landing pads in -fno-exceptions.
This is a much more comprehensive solution to a problem which previously was
half-solved by checks in most cleanup-generation spots.
llvm-svn: 108270
Diffstat (limited to 'clang/lib/CodeGen/CGException.h')
-rw-r--r-- | clang/lib/CodeGen/CGException.h | 92 |
1 file changed, 89 insertions, 3 deletions
diff --git a/clang/lib/CodeGen/CGException.h b/clang/lib/CodeGen/CGException.h index 8755dca2b22..80739cd8d73 100644 --- a/clang/lib/CodeGen/CGException.h +++ b/clang/lib/CodeGen/CGException.h @@ -31,13 +31,13 @@ namespace CodeGen { class EHScope { llvm::BasicBlock *CachedLandingPad; - unsigned K : 2; + unsigned K : 3; protected: - enum { BitsRemaining = 30 }; + enum { BitsRemaining = 29 }; public: - enum Kind { Cleanup, Catch, Terminate, Filter }; + enum Kind { Cleanup, LazyCleanup, Catch, Terminate, Filter }; EHScope(Kind K) : CachedLandingPad(0), K(K) {} @@ -127,6 +127,87 @@ public: } }; +/// A cleanup scope which generates the cleanup blocks lazily. +class EHLazyCleanupScope : public EHScope { + /// Whether this cleanup needs to be run along normal edges. + bool IsNormalCleanup : 1; + + /// Whether this cleanup needs to be run along exception edges. + bool IsEHCleanup : 1; + + /// The amount of extra storage needed by the LazyCleanup. + /// Always a multiple of the scope-stack alignment. + unsigned CleanupSize : 12; + + /// The number of fixups required by enclosing scopes (not including + /// this one). If this is the top cleanup scope, all the fixups + /// from this index onwards belong to this scope. + unsigned FixupDepth : BitsRemaining - 14; + + /// The nearest normal cleanup scope enclosing this one. + EHScopeStack::stable_iterator EnclosingNormal; + + /// The nearest EH cleanup scope enclosing this one. + EHScopeStack::stable_iterator EnclosingEH; + + /// The dual entry/exit block along the normal edge. This is lazily + /// created if needed before the cleanup is popped. + llvm::BasicBlock *NormalBlock; + + /// The dual entry/exit block along the EH edge. This is lazily + /// created if needed before the cleanup is popped. + llvm::BasicBlock *EHBlock; + +public: + /// Gets the size required for a lazy cleanup scope with the given + /// cleanup-data requirements. 
+ static size_t getSizeForCleanupSize(size_t Size) { + return sizeof(EHLazyCleanupScope) + Size; + } + + size_t getAllocatedSize() const { + return sizeof(EHLazyCleanupScope) + CleanupSize; + } + + EHLazyCleanupScope(bool IsNormal, bool IsEH, unsigned CleanupSize, + unsigned FixupDepth, + EHScopeStack::stable_iterator EnclosingNormal, + EHScopeStack::stable_iterator EnclosingEH) + : EHScope(EHScope::LazyCleanup), + IsNormalCleanup(IsNormal), IsEHCleanup(IsEH), + CleanupSize(CleanupSize), FixupDepth(FixupDepth), + EnclosingNormal(EnclosingNormal), EnclosingEH(EnclosingEH), + NormalBlock(0), EHBlock(0) + {} + + bool isNormalCleanup() const { return IsNormalCleanup; } + llvm::BasicBlock *getNormalBlock() const { return NormalBlock; } + void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; } + + bool isEHCleanup() const { return IsEHCleanup; } + llvm::BasicBlock *getEHBlock() const { return EHBlock; } + void setEHBlock(llvm::BasicBlock *BB) { EHBlock = BB; } + + unsigned getFixupDepth() const { return FixupDepth; } + EHScopeStack::stable_iterator getEnclosingNormalCleanup() const { + return EnclosingNormal; + } + EHScopeStack::stable_iterator getEnclosingEHCleanup() const { + return EnclosingEH; + } + + size_t getCleanupSize() const { return CleanupSize; } + void *getCleanupBuffer() { return this + 1; } + + EHScopeStack::LazyCleanup *getCleanup() { + return reinterpret_cast<EHScopeStack::LazyCleanup*>(getCleanupBuffer()); + } + + static bool classof(const EHScope *Scope) { + return (Scope->getKind() == LazyCleanup); + } +}; + /// A scope which needs to execute some code if we try to unwind --- /// either normally, via the EH mechanism, or both --- through it. 
class EHCleanupScope : public EHScope { @@ -267,6 +348,11 @@ public: static_cast<const EHFilterScope*>(get())->getNumFilters()); break; + case EHScope::LazyCleanup: + Ptr += static_cast<const EHLazyCleanupScope*>(get()) + ->getAllocatedSize(); + break; + case EHScope::Cleanup: Ptr += EHCleanupScope::getSize(); break; |