summaryrefslogtreecommitdiffstats
path: root/clang/lib/AST/VTableBuilder.cpp
diff options
context:
space:
mode:
authorReid Kleckner <reid@kleckner.net>2014-01-03 23:42:00 +0000
committerReid Kleckner <reid@kleckner.net>2014-01-03 23:42:00 +0000
commit5f0809410640e75b6da9adf215b3ec97d07015b5 (patch)
tree62c4bbc6460bb858de73e95e285204173736b27a /clang/lib/AST/VTableBuilder.cpp
parent96e70d9148df9f62c0e0d9ff6368b90c0b440c97 (diff)
downloadbcm5719-llvm-5f0809410640e75b6da9adf215b3ec97d07015b5.tar.gz
bcm5719-llvm-5f0809410640e75b6da9adf215b3ec97d07015b5.zip
[ms-cxxabi] Improve vbtable name mangling accuracy
Summary: This makes us more compatible with MSVC 2012+ and fixes PR17748 where we would give two tables the same name. Rather than doing a fresh depth-first traversal of the inheritance graph for every record's vbtables, now we memoize vbtable paths for each record. By doing memoization, we end up considering virtual bases of subobjects that come later in the depth-first traversal. Where previously we would have ignored a virtual base that we'd already seen, we now consider it for name mangling purposes without emitting a duplicate vbtable for it. Reviewers: majnemer CC: cfe-commits Differential Revision: http://llvm-reviews.chandlerc.com/D2509 llvm-svn: 198462
Diffstat (limited to 'clang/lib/AST/VTableBuilder.cpp')
-rw-r--r--clang/lib/AST/VTableBuilder.cpp263
1 files changed, 101 insertions, 162 deletions
diff --git a/clang/lib/AST/VTableBuilder.cpp b/clang/lib/AST/VTableBuilder.cpp
index f6c8f1c3cf5..8cf699ebd53 100644
--- a/clang/lib/AST/VTableBuilder.cpp
+++ b/clang/lib/AST/VTableBuilder.cpp
@@ -16,6 +16,7 @@
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/RecordLayout.h"
#include "clang/Basic/TargetInfo.h"
+#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/Support/Format.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
@@ -3179,22 +3180,22 @@ void VFTableBuilder::dumpLayout(raw_ostream &Out) {
}
}
-namespace {
+static bool setsIntersect(const llvm::SmallPtrSet<const CXXRecordDecl *, 4> &A,
+ const llvm::ArrayRef<const CXXRecordDecl *> &B) {
+ for (llvm::ArrayRef<const CXXRecordDecl *>::iterator I = B.begin(),
+ E = B.end();
+ I != E; ++I) {
+ if (A.count(*I))
+ return true;
+ }
+ return false;
+}
-struct VBTablePath;
-typedef llvm::SmallVector<VBTablePath *, 6> VBTablePathVector;
+static bool rebucketPaths(VBTableVector &Paths);
-/// Produces MSVC-compatible vbtable data. The symbols produced by this builder
-/// match those produced by MSVC 2012, which is different from MSVC 2010.
-///
-/// Unlike Itanium, which uses only one vtable per class, MSVC uses a different
-/// symbol for every "address point" installed in base subobjects. As a result,
-/// we have to compute unique symbols for every table. Since there can be
-/// multiple non-virtual base subobjects of the same class, combining the most
-/// derived class with the base containing the vtable is insufficient. The most
-/// trivial algorithm would be to mangle in the entire path from base to most
-/// derived, but that would be too easy and would create unnecessarily large
-/// symbols. ;)
+/// Produces MSVC-compatible vbtable data. The symbols produced by this
+/// algorithm match those produced by MSVC 2012 and newer, which is different
+/// from MSVC 2010.
///
/// MSVC 2012 appears to minimize the vbtable names using the following
/// algorithm. First, walk the class hierarchy in the usual order, depth first,
@@ -3212,181 +3213,114 @@ typedef llvm::SmallVector<VBTablePath *, 6> VBTablePathVector;
/// unambiguous set of paths, MSVC doesn't need to extend any path more than once
/// to produce an unambiguous set of paths.
///
-/// The VBTableBuilder class attempts to implement this algorithm by repeatedly
-/// bucketing paths together by sorting them.
-///
/// TODO: Presumably vftables use the same algorithm.
-///
-/// TODO: Implement the MSVC 2010 name mangling scheme to avoid emitting
-/// duplicate vbtables with different symbols.
-
-class VBTableBuilder {
-public:
- VBTableBuilder(ASTContext &Context, const CXXRecordDecl *MostDerived);
-
- void enumerateVBTables(VBTableVector &VBTables);
-
-private:
- bool hasVBPtr(const CXXRecordDecl *RD);
-
- /// Enumerates paths to bases with vbptrs. The paths elements are compressed
- /// to contain only the classes necessary to form an unambiguous path.
- void findUnambiguousPaths(const CXXRecordDecl *ReusingBase,
- BaseSubobject CurSubobject,
- VBTablePathVector &Paths);
-
- void extendPath(VBTablePath *Info, bool SecondPass);
-
- bool rebucketPaths(VBTablePathVector &Paths, size_t PathsStart,
- bool SecondPass = false);
-
- ASTContext &Context;
-
- const CXXRecordDecl *MostDerived;
+void
+MicrosoftVTableContext::computeVBTablePaths(const CXXRecordDecl *RD,
+ VBTableVector &Paths) {
+ assert(Paths.empty());
+ const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
- /// Caches the layout of the most derived class.
- const ASTRecordLayout &DerivedLayout;
+ // Base case: this subobject has its own vbptr.
+ if (Layout.hasOwnVBPtr())
+ Paths.push_back(new VBTableInfo(RD));
- /// Set of vbases to avoid re-visiting the same vbases.
+ // Recursive case: get all the vbtables from our bases and remove anything
+ // that shares a virtual base.
llvm::SmallPtrSet<const CXXRecordDecl*, 4> VBasesSeen;
-};
-
-/// Holds intermediate data about a path to a vbptr inside a base subobject.
-struct VBTablePath {
- VBTablePath(const VBTableInfo &VBInfo)
- : VBInfo(VBInfo), NextBase(VBInfo.VBPtrSubobject.getBase()) { }
-
- /// All the data needed to build a vbtable, minus the GlobalVariable whose
- /// name we haven't computed yet.
- VBTableInfo VBInfo;
-
- /// Next base to use for disambiguation. Can be null if we've already
- /// disambiguated this path once.
- const CXXRecordDecl *NextBase;
-};
-
-} // end namespace
-
-VBTableBuilder::VBTableBuilder(ASTContext &Context,
- const CXXRecordDecl *MostDerived)
- : Context(Context), MostDerived(MostDerived),
- DerivedLayout(Context.getASTRecordLayout(MostDerived)) {}
-
-void VBTableBuilder::enumerateVBTables(VBTableVector &VBTables) {
- VBTablePathVector Paths;
- findUnambiguousPaths(MostDerived, BaseSubobject(MostDerived,
- CharUnits::Zero()), Paths);
- for (VBTablePathVector::iterator I = Paths.begin(), E = Paths.end();
+ for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
+ E = RD->bases_end();
I != E; ++I) {
- VBTablePath *P = *I;
- VBTables.push_back(P->VBInfo);
- }
-}
+ const CXXRecordDecl *Base = I->getType()->getAsCXXRecordDecl();
+ if (I->isVirtual() && VBasesSeen.count(Base))
+ continue;
+ const VBTableVector &BasePaths = enumerateVBTables(Base);
-void VBTableBuilder::findUnambiguousPaths(const CXXRecordDecl *ReusingBase,
- BaseSubobject CurSubobject,
- VBTablePathVector &Paths) {
- size_t PathsStart = Paths.size();
- bool ReuseVBPtrFromBase = true;
- const CXXRecordDecl *CurBase = CurSubobject.getBase();
- const ASTRecordLayout &Layout = Context.getASTRecordLayout(CurBase);
+ for (VBTableVector::const_iterator II = BasePaths.begin(),
+ EE = BasePaths.end();
+ II != EE; ++II) {
+ VBTableInfo *BasePath = *II;
- // If this base has a vbptr, then we've found a path. These are not full
- // paths, so we don't use CXXBasePath.
- if (Layout.hasOwnVBPtr()) {
- ReuseVBPtrFromBase = false;
- VBTablePath *Info = new VBTablePath(VBTableInfo(ReusingBase, CurSubobject));
- Paths.push_back(Info);
- }
+ // Don't include the path if it goes through a virtual base that we've
+ // already included.
+ if (setsIntersect(VBasesSeen, BasePath->ContainingVBases))
+ continue;
- // Recurse onto any bases which themselves have virtual bases.
- for (CXXRecordDecl::base_class_const_iterator I = CurBase->bases_begin(),
- E = CurBase->bases_end(); I != E; ++I) {
- const CXXRecordDecl *Base = I->getType()->getAsCXXRecordDecl();
- if (!Base->getNumVBases())
- continue; // Bases without virtual bases have no vbptrs.
- CharUnits NextOffset;
- const CXXRecordDecl *NextReusingBase = Base;
- if (I->isVirtual()) {
- if (!VBasesSeen.insert(Base))
- continue; // Don't visit virtual bases twice.
- NextOffset = DerivedLayout.getVBaseClassOffset(Base);
- } else {
- NextOffset = (CurSubobject.getBaseOffset() +
- Layout.getBaseClassOffset(Base));
-
- // If CurBase didn't have a vbptr, then ReusingBase will reuse the vbptr
- // from the first non-virtual base with vbases for its vbptr.
- if (ReuseVBPtrFromBase) {
- NextReusingBase = ReusingBase;
- ReuseVBPtrFromBase = false;
- }
+ // Copy the path and adjust it as necessary.
+ VBTableInfo *P = new VBTableInfo(*BasePath);
+
+ // We mangle Base into the path if the path would've been ambiguous and it
+ // wasn't already extended with Base.
+ if (P->MangledPath.empty() || P->MangledPath.back() != Base)
+ P->NextBaseToMangle = Base;
+
+ // Keep track of which derived class ultimately uses the vbtable, and what
+ // the full adjustment is from the MDC to this vbtable. The adjustment is
+ // captured by an optional vbase and a non-virtual offset.
+ if (Base == Layout.getBaseSharingVBPtr())
+ P->ReusingBase = RD;
+ if (I->isVirtual())
+ P->ContainingVBases.push_back(Base);
+ else if (P->ContainingVBases.empty())
+ P->NonVirtualOffset += Layout.getBaseClassOffset(Base);
+
+ Paths.push_back(P);
}
- size_t NumPaths = Paths.size();
- findUnambiguousPaths(NextReusingBase, BaseSubobject(Base, NextOffset),
- Paths);
-
- // Tag paths through this base with the base itself. We might use it to
- // disambiguate.
- for (size_t I = NumPaths, E = Paths.size(); I != E; ++I)
- Paths[I]->NextBase = Base;
+ // After visiting any direct base, we've transitively visited all of its
+ // morally virtual bases.
+ for (CXXRecordDecl::base_class_const_iterator II = Base->vbases_begin(),
+ EE = Base->vbases_end();
+ II != EE; ++II)
+ VBasesSeen.insert(II->getType()->getAsCXXRecordDecl());
}
- bool AmbiguousPaths = rebucketPaths(Paths, PathsStart);
- if (AmbiguousPaths)
- rebucketPaths(Paths, PathsStart, /*SecondPass=*/true);
-
-#ifndef NDEBUG
- // Check that the paths are in fact unique.
- for (size_t I = PathsStart + 1, E = Paths.size(); I != E; ++I) {
- assert(Paths[I]->VBInfo.MangledPath != Paths[I - 1]->VBInfo.MangledPath &&
- "vbtable paths are not unique");
- }
-#endif
+ // Sort the paths into buckets, and if any of them are ambiguous, extend all
+ // paths in ambiguous buckets.
+ bool Changed = true;
+ while (Changed)
+ Changed = rebucketPaths(Paths);
}
-static bool pathCompare(VBTablePath *LHS, VBTablePath *RHS) {
- return LHS->VBInfo.MangledPath < RHS->VBInfo.MangledPath;
+static bool pathCompare(const VBTableInfo *LHS, const VBTableInfo *RHS) {
+ return LHS->MangledPath < RHS->MangledPath;
}
-void VBTableBuilder::extendPath(VBTablePath *P, bool SecondPass) {
- assert(P->NextBase || SecondPass);
- if (P->NextBase) {
- P->VBInfo.MangledPath.push_back(P->NextBase);
- P->NextBase = 0; // Prevent the path from being extended twice.
+static bool extendPath(VBTableInfo *P) {
+ if (P->NextBaseToMangle) {
+ P->MangledPath.push_back(P->NextBaseToMangle);
+ P->NextBaseToMangle = 0; // Prevent the path from being extended twice.
+ return true;
}
+ return false;
}
-bool VBTableBuilder::rebucketPaths(VBTablePathVector &Paths, size_t PathsStart,
- bool SecondPass) {
+static bool rebucketPaths(VBTableVector &Paths) {
// What we're essentially doing here is bucketing together ambiguous paths.
// Any bucket with more than one path in it gets extended by NextBase, which
// is usually the direct base that inherited the vbptr. This code uses a
// sorted vector to implement a multiset to form the buckets. Note that the
// ordering is based on pointers, but it doesn't change our output order. The
// current algorithm is designed to match MSVC 2012's names.
- // TODO: Implement MSVC 2010 or earlier names to avoid extra vbtable cruft.
- VBTablePathVector PathsSorted(&Paths[PathsStart], &Paths.back() + 1);
+ VBTableVector PathsSorted(Paths);
std::sort(PathsSorted.begin(), PathsSorted.end(), pathCompare);
- bool AmbiguousPaths = false;
+ bool Changed = false;
for (size_t I = 0, E = PathsSorted.size(); I != E;) {
// Scan forward to find the end of the bucket.
size_t BucketStart = I;
do {
++I;
- } while (I != E && PathsSorted[BucketStart]->VBInfo.MangledPath ==
- PathsSorted[I]->VBInfo.MangledPath);
+ } while (I != E && PathsSorted[BucketStart]->MangledPath ==
+ PathsSorted[I]->MangledPath);
// If this bucket has multiple paths, extend them all.
if (I - BucketStart > 1) {
- AmbiguousPaths = true;
for (size_t II = BucketStart; II != I; ++II)
- extendPath(PathsSorted[II], SecondPass);
+ Changed |= extendPath(PathsSorted[II]);
+ assert(Changed && "no paths were extended to fix ambiguity");
}
}
- return AmbiguousPaths;
+ return Changed;
}
MicrosoftVTableContext::~MicrosoftVTableContext() {
@@ -3608,13 +3542,18 @@ void MicrosoftVTableContext::dumpMethodLocations(
const VirtualBaseInfo *MicrosoftVTableContext::computeVBTableRelatedInformation(
const CXXRecordDecl *RD) {
- VirtualBaseInfo *&Entry = VBaseInfo[RD];
- if (Entry)
- return Entry;
+ VirtualBaseInfo *VBI;
- Entry = new VirtualBaseInfo();
+ {
+ // Get or create a VBI for RD. Don't hold a reference to the DenseMap cell,
+ // as it may be modified and rehashed under us.
+ VirtualBaseInfo *&Entry = VBaseInfo[RD];
+ if (Entry)
+ return Entry;
+ Entry = VBI = new VirtualBaseInfo();
+ }
- VBTableBuilder(Context, RD).enumerateVBTables(Entry->VBTables);
+ computeVBTablePaths(RD, VBI->VBTables);
// First, see if the Derived class shared the vbptr with a non-virtual base.
const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
@@ -3623,22 +3562,22 @@ const VirtualBaseInfo *MicrosoftVTableContext::computeVBTableRelatedInformation(
// virtual bases come first so that the layout is the same.
const VirtualBaseInfo *BaseInfo =
computeVBTableRelatedInformation(VBPtrBase);
- Entry->VBTableIndices.insert(BaseInfo->VBTableIndices.begin(),
- BaseInfo->VBTableIndices.end());
+ VBI->VBTableIndices.insert(BaseInfo->VBTableIndices.begin(),
+ BaseInfo->VBTableIndices.end());
}
// New vbases are added to the end of the vbtable.
// Skip the self entry and vbases visited in the non-virtual base, if any.
- unsigned VBTableIndex = 1 + Entry->VBTableIndices.size();
+ unsigned VBTableIndex = 1 + VBI->VBTableIndices.size();
for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
E = RD->vbases_end();
I != E; ++I) {
const CXXRecordDecl *CurVBase = I->getType()->getAsCXXRecordDecl();
- if (!Entry->VBTableIndices.count(CurVBase))
- Entry->VBTableIndices[CurVBase] = VBTableIndex++;
+ if (!VBI->VBTableIndices.count(CurVBase))
+ VBI->VBTableIndices[CurVBase] = VBTableIndex++;
}
- return Entry;
+ return VBI;
}
unsigned MicrosoftVTableContext::getVBTableIndex(const CXXRecordDecl *Derived,
OpenPOWER on IntegriCloud