Diffstat (limited to 'clang')

 -rw-r--r--  clang/include/clang/AST/VTableBuilder.h           |  37
 -rw-r--r--  clang/lib/AST/VTableBuilder.cpp                    | 263
 -rw-r--r--  clang/lib/CodeGen/MicrosoftCXXABI.cpp              |  31
 -rw-r--r--  clang/test/CodeGenCXX/microsoft-abi-vbtables.cpp   |  41

 4 files changed, 193 insertions, 179 deletions
diff --git a/clang/include/clang/AST/VTableBuilder.h b/clang/include/clang/AST/VTableBuilder.h
index 355df9bbfba..5362f6fe2f3 100644
--- a/clang/include/clang/AST/VTableBuilder.h
+++ b/clang/include/clang/AST/VTableBuilder.h
@@ -412,31 +412,54 @@ struct VFPtrInfo {
 /// Holds information for a virtual base table for a single subobject. A record
 /// may contain as many vbptrs as there are base subobjects.
 struct VBTableInfo {
-  VBTableInfo(const CXXRecordDecl *ReusingBase, BaseSubobject VBPtrSubobject)
-    : ReusingBase(ReusingBase), VBPtrSubobject(VBPtrSubobject) { }
+  VBTableInfo(const CXXRecordDecl *RD)
+      : ReusingBase(RD), BaseWithVBPtr(RD), NextBaseToMangle(RD) {}
+
+  // Copy constructor.
+  // FIXME: Uncomment when we've moved to C++11.
+  //VBTableInfo(const VBTableInfo &) = default;
 
   /// The vbtable will hold all of the virtual bases of ReusingBase. This may
   /// or may not be the same class as VBPtrSubobject.Base. A derived class will
   /// reuse the vbptr of the first non-virtual base subobject that has one.
   const CXXRecordDecl *ReusingBase;
 
+  /// BaseWithVBPtr is at this offset from its containing complete object or
+  /// virtual base.
+  CharUnits NonVirtualOffset;
+
   /// The vbptr is stored inside this subobject.
-  BaseSubobject VBPtrSubobject;
+  const CXXRecordDecl *BaseWithVBPtr;
 
   /// The bases from the inheritance path that got used to mangle the vbtable
   /// name. This is not really a full path like a CXXBasePath. It holds the
   /// subset of records that need to be mangled into the vbtable symbol name in
   /// order to get a unique name.
-  llvm::SmallVector<const CXXRecordDecl *, 1> MangledPath;
+  SmallVector<const CXXRecordDecl *, 1> MangledPath;
+
+  /// The next base to push onto the mangled path if this path is ambiguous in a
+  /// derived class. If it's null, then it's already been pushed onto the path.
+  const CXXRecordDecl *NextBaseToMangle;
+
+  /// The set of possibly indirect vbases that contain this vbtable. When a
+  /// derived class indirectly inherits from the same vbase twice, we only keep
+  /// vbtables and their paths from the first instance.
+  SmallVector<const CXXRecordDecl *, 1> ContainingVBases;
+
+  /// The vbptr is stored inside the non-virtual component of this virtual base.
+  const CXXRecordDecl *getVBaseWithVBPtr() const {
+    return ContainingVBases.empty() ? 0 : ContainingVBases.front();
+  }
 };
 
-// FIXME: Don't store these by value, they contain vectors.
-typedef SmallVector<VBTableInfo, 2> VBTableVector;
+typedef SmallVector<VBTableInfo *, 2> VBTableVector;
 
 /// All virtual base related information about a given record decl. Includes
 /// information on all virtual base tables and the path components that are used
 /// to mangle them.
 struct VirtualBaseInfo {
+  ~VirtualBaseInfo() { llvm::DeleteContainerPointers(VBTables); }
+
   /// A map from virtual base to vbtable index for doing a conversion from the
   /// the derived class to the a base.
   llvm::DenseMap<const CXXRecordDecl *, unsigned> VBTableIndices;
@@ -524,6 +547,8 @@ private:
   const VirtualBaseInfo *
   computeVBTableRelatedInformation(const CXXRecordDecl *RD);
 
+  void computeVBTablePaths(const CXXRecordDecl *RD, VBTableVector &Paths);
+
 public:
   MicrosoftVTableContext(ASTContext &Context)
       : VTableContextBase(/*MS=*/true), Context(Context) {}
diff --git a/clang/lib/AST/VTableBuilder.cpp b/clang/lib/AST/VTableBuilder.cpp
index f6c8f1c3cf5..8cf699ebd53 100644
--- a/clang/lib/AST/VTableBuilder.cpp
+++ b/clang/lib/AST/VTableBuilder.cpp
@@ -16,6 +16,7 @@
 #include "clang/AST/CXXInheritance.h"
 #include "clang/AST/RecordLayout.h"
 #include "clang/Basic/TargetInfo.h"
+#include "llvm/ADT/SmallPtrSet.h"
 #include "llvm/Support/Format.h"
 #include "llvm/Support/raw_ostream.h"
 #include <algorithm>
@@ -3179,22 +3180,22 @@ void VFTableBuilder::dumpLayout(raw_ostream &Out) {
   }
 }
 
-namespace {
+static bool setsIntersect(const llvm::SmallPtrSet<const CXXRecordDecl *, 4> &A,
+                          const llvm::ArrayRef<const CXXRecordDecl *> &B) {
+  for (llvm::ArrayRef<const CXXRecordDecl *>::iterator I = B.begin(),
+                                                       E = B.end();
+       I != E; ++I) {
+    if (A.count(*I))
+      return true;
+  }
+  return false;
+}
 
-struct VBTablePath;
-typedef llvm::SmallVector<VBTablePath *, 6> VBTablePathVector;
+static bool rebucketPaths(VBTableVector &Paths);
 
-/// Produces MSVC-compatible vbtable data. The symbols produced by this builder
-/// match those produced by MSVC 2012, which is different from MSVC 2010.
-///
-/// Unlike Itanium, which uses only one vtable per class, MSVC uses a different
-/// symbol for every "address point" installed in base subobjects. As a result,
-/// we have to compute unique symbols for every table. Since there can be
-/// multiple non-virtual base subobjects of the same class, combining the most
-/// derived class with the base containing the vtable is insufficient. The most
-/// trivial algorithm would be to mangle in the entire path from base to most
-/// derived, but that would be too easy and would create unnecessarily large
-/// symbols. ;)
+/// Produces MSVC-compatible vbtable data. The symbols produced by this
+/// algorithm match those produced by MSVC 2012 and newer, which is different
+/// from MSVC 2010.
 ///
 /// MSVC 2012 appears to minimize the vbtable names using the following
 /// algorithm. First, walk the class hierarchy in the usual order, depth first,
@@ -3212,181 +3213,114 @@ typedef llvm::SmallVector<VBTablePath *, 6> VBTablePathVector;
 /// unambiguous set of paths, MSVC doesn't need to extend any path more than once
 /// to produce an unambiguous set of paths.
 ///
-/// The VBTableBuilder class attempts to implement this algorithm by repeatedly
-/// bucketing paths together by sorting them.
-///
 /// TODO: Presumably vftables use the same algorithm.
-///
-/// TODO: Implement the MSVC 2010 name mangling scheme to avoid emitting
-/// duplicate vbtables with different symbols.
-
-class VBTableBuilder {
-public:
-  VBTableBuilder(ASTContext &Context, const CXXRecordDecl *MostDerived);
-
-  void enumerateVBTables(VBTableVector &VBTables);
-
-private:
-  bool hasVBPtr(const CXXRecordDecl *RD);
-
-  /// Enumerates paths to bases with vbptrs. The paths elements are compressed
-  /// to contain only the classes necessary to form an unambiguous path.
-  void findUnambiguousPaths(const CXXRecordDecl *ReusingBase,
-                            BaseSubobject CurSubobject,
-                            VBTablePathVector &Paths);
-
-  void extendPath(VBTablePath *Info, bool SecondPass);
-
-  bool rebucketPaths(VBTablePathVector &Paths, size_t PathsStart,
-                     bool SecondPass = false);
-
-  ASTContext &Context;
-
-  const CXXRecordDecl *MostDerived;
+void
+MicrosoftVTableContext::computeVBTablePaths(const CXXRecordDecl *RD,
+                                            VBTableVector &Paths) {
+  assert(Paths.empty());
+  const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
 
-  /// Caches the layout of the most derived class.
-  const ASTRecordLayout &DerivedLayout;
+  // Base case: this subobject has its own vbptr.
+  if (Layout.hasOwnVBPtr())
+    Paths.push_back(new VBTableInfo(RD));
 
-  /// Set of vbases to avoid re-visiting the same vbases.
+  // Recursive case: get all the vbtables from our bases and remove anything
+  // that shares a virtual base.
   llvm::SmallPtrSet<const CXXRecordDecl*, 4> VBasesSeen;
-};
-
-/// Holds intermediate data about a path to a vbptr inside a base subobject.
-struct VBTablePath {
-  VBTablePath(const VBTableInfo &VBInfo)
-    : VBInfo(VBInfo), NextBase(VBInfo.VBPtrSubobject.getBase()) { }
-
-  /// All the data needed to build a vbtable, minus the GlobalVariable whose
-  /// name we haven't computed yet.
-  VBTableInfo VBInfo;
-
-  /// Next base to use for disambiguation. Can be null if we've already
-  /// disambiguated this path once.
-  const CXXRecordDecl *NextBase;
-};
-
-} // end namespace
-
-VBTableBuilder::VBTableBuilder(ASTContext &Context,
-                               const CXXRecordDecl *MostDerived)
-    : Context(Context), MostDerived(MostDerived),
-      DerivedLayout(Context.getASTRecordLayout(MostDerived)) {}
-
-void VBTableBuilder::enumerateVBTables(VBTableVector &VBTables) {
-  VBTablePathVector Paths;
-  findUnambiguousPaths(MostDerived, BaseSubobject(MostDerived,
-                                                  CharUnits::Zero()), Paths);
-  for (VBTablePathVector::iterator I = Paths.begin(), E = Paths.end();
+  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
+                                                E = RD->bases_end();
        I != E; ++I) {
-    VBTablePath *P = *I;
-    VBTables.push_back(P->VBInfo);
-  }
-}
+    const CXXRecordDecl *Base = I->getType()->getAsCXXRecordDecl();
+    if (I->isVirtual() && VBasesSeen.count(Base))
+      continue;
+    const VBTableVector &BasePaths = enumerateVBTables(Base);
 
-void VBTableBuilder::findUnambiguousPaths(const CXXRecordDecl *ReusingBase,
-                                          BaseSubobject CurSubobject,
-                                          VBTablePathVector &Paths) {
-  size_t PathsStart = Paths.size();
-  bool ReuseVBPtrFromBase = true;
-  const CXXRecordDecl *CurBase = CurSubobject.getBase();
-  const ASTRecordLayout &Layout = Context.getASTRecordLayout(CurBase);
+    for (VBTableVector::const_iterator II = BasePaths.begin(),
+                                       EE = BasePaths.end();
+         II != EE; ++II) {
+      VBTableInfo *BasePath = *II;
 
-  // If this base has a vbptr, then we've found a path. These are not full
-  // paths, so we don't use CXXBasePath.
-  if (Layout.hasOwnVBPtr()) {
-    ReuseVBPtrFromBase = false;
-    VBTablePath *Info = new VBTablePath(VBTableInfo(ReusingBase, CurSubobject));
-    Paths.push_back(Info);
-  }
+      // Don't include the path if it goes through a virtual base that we've
+      // already included.
+      if (setsIntersect(VBasesSeen, BasePath->ContainingVBases))
+        continue;
 
-  // Recurse onto any bases which themselves have virtual bases.
-  for (CXXRecordDecl::base_class_const_iterator I = CurBase->bases_begin(),
-       E = CurBase->bases_end(); I != E; ++I) {
-    const CXXRecordDecl *Base = I->getType()->getAsCXXRecordDecl();
-    if (!Base->getNumVBases())
-      continue;  // Bases without virtual bases have no vbptrs.
-    CharUnits NextOffset;
-    const CXXRecordDecl *NextReusingBase = Base;
-    if (I->isVirtual()) {
-      if (!VBasesSeen.insert(Base))
-        continue;  // Don't visit virtual bases twice.
-      NextOffset = DerivedLayout.getVBaseClassOffset(Base);
-    } else {
-      NextOffset = (CurSubobject.getBaseOffset() +
-                    Layout.getBaseClassOffset(Base));
-
-      // If CurBase didn't have a vbptr, then ReusingBase will reuse the vbptr
-      // from the first non-virtual base with vbases for its vbptr.
-      if (ReuseVBPtrFromBase) {
-        NextReusingBase = ReusingBase;
-        ReuseVBPtrFromBase = false;
-      }
+      // Copy the path and adjust it as necessary.
+      VBTableInfo *P = new VBTableInfo(*BasePath);
+
+      // We mangle Base into the path if the path would've been ambiguous and it
+      // wasn't already extended with Base.
+      if (P->MangledPath.empty() || P->MangledPath.back() != Base)
+        P->NextBaseToMangle = Base;
+
+      // Keep track of which derived class ultimately uses the vbtable, and what
+      // the full adjustment is from the MDC to this vbtable. The adjustment is
+      // captured by an optional vbase and a non-virtual offset.
+      if (Base == Layout.getBaseSharingVBPtr())
+        P->ReusingBase = RD;
+      if (I->isVirtual())
+        P->ContainingVBases.push_back(Base);
+      else if (P->ContainingVBases.empty())
+        P->NonVirtualOffset += Layout.getBaseClassOffset(Base);
+
+      Paths.push_back(P);
     }
 
-    size_t NumPaths = Paths.size();
-    findUnambiguousPaths(NextReusingBase, BaseSubobject(Base, NextOffset),
-                         Paths);
-
-    // Tag paths through this base with the base itself. We might use it to
-    // disambiguate.
-    for (size_t I = NumPaths, E = Paths.size(); I != E; ++I)
-      Paths[I]->NextBase = Base;
+    // After visiting any direct base, we've transitively visited all of its
+    // morally virtual bases.
+    for (CXXRecordDecl::base_class_const_iterator II = Base->vbases_begin(),
+                                                  EE = Base->vbases_end();
+         II != EE; ++II)
+      VBasesSeen.insert(II->getType()->getAsCXXRecordDecl());
   }
 
-  bool AmbiguousPaths = rebucketPaths(Paths, PathsStart);
-  if (AmbiguousPaths)
-    rebucketPaths(Paths, PathsStart, /*SecondPass=*/true);
-
-#ifndef NDEBUG
-  // Check that the paths are in fact unique.
-  for (size_t I = PathsStart + 1, E = Paths.size(); I != E; ++I) {
-    assert(Paths[I]->VBInfo.MangledPath != Paths[I - 1]->VBInfo.MangledPath &&
-           "vbtable paths are not unique");
-  }
-#endif
+  // Sort the paths into buckets, and if any of them are ambiguous, extend all
+  // paths in ambiguous buckets.
+  bool Changed = true;
+  while (Changed)
+    Changed = rebucketPaths(Paths);
 }
 
-static bool pathCompare(VBTablePath *LHS, VBTablePath *RHS) {
-  return LHS->VBInfo.MangledPath < RHS->VBInfo.MangledPath;
+static bool pathCompare(const VBTableInfo *LHS, const VBTableInfo *RHS) {
+  return LHS->MangledPath < RHS->MangledPath;
 }
 
-void VBTableBuilder::extendPath(VBTablePath *P, bool SecondPass) {
-  assert(P->NextBase || SecondPass);
-  if (P->NextBase) {
-    P->VBInfo.MangledPath.push_back(P->NextBase);
-    P->NextBase = 0;  // Prevent the path from being extended twice.
+static bool extendPath(VBTableInfo *P) {
+  if (P->NextBaseToMangle) {
+    P->MangledPath.push_back(P->NextBaseToMangle);
+    P->NextBaseToMangle = 0;  // Prevent the path from being extended twice.
+    return true;
   }
+  return false;
 }
 
-bool VBTableBuilder::rebucketPaths(VBTablePathVector &Paths, size_t PathsStart,
-                                   bool SecondPass) {
+static bool rebucketPaths(VBTableVector &Paths) {
   // What we're essentially doing here is bucketing together ambiguous paths.
   // Any bucket with more than one path in it gets extended by NextBase, which
   // is usually the direct base of the inherited the vbptr. This code uses a
   // sorted vector to implement a multiset to form the buckets. Note that the
   // ordering is based on pointers, but it doesn't change our output order. The
   // current algorithm is designed to match MSVC 2012's names.
-  // TODO: Implement MSVC 2010 or earlier names to avoid extra vbtable cruft.
-  VBTablePathVector PathsSorted(&Paths[PathsStart], &Paths.back() + 1);
+  VBTableVector PathsSorted(Paths);
   std::sort(PathsSorted.begin(), PathsSorted.end(), pathCompare);
-  bool AmbiguousPaths = false;
+  bool Changed = false;
   for (size_t I = 0, E = PathsSorted.size(); I != E;) {
     // Scan forward to find the end of the bucket.
     size_t BucketStart = I;
     do {
       ++I;
-    } while (I != E && PathsSorted[BucketStart]->VBInfo.MangledPath ==
-                       PathsSorted[I]->VBInfo.MangledPath);
+    } while (I != E && PathsSorted[BucketStart]->MangledPath ==
+                       PathsSorted[I]->MangledPath);
 
     // If this bucket has multiple paths, extend them all.
     if (I - BucketStart > 1) {
-      AmbiguousPaths = true;
       for (size_t II = BucketStart; II != I; ++II)
-        extendPath(PathsSorted[II], SecondPass);
+        Changed |= extendPath(PathsSorted[II]);
+      assert(Changed && "no paths were extended to fix ambiguity");
     }
   }
-  return AmbiguousPaths;
+  return Changed;
 }
 
 MicrosoftVTableContext::~MicrosoftVTableContext() {
@@ -3608,13 +3542,18 @@ void MicrosoftVTableContext::dumpMethodLocations(
 
 const VirtualBaseInfo *MicrosoftVTableContext::computeVBTableRelatedInformation(
     const CXXRecordDecl *RD) {
-  VirtualBaseInfo *&Entry = VBaseInfo[RD];
-  if (Entry)
-    return Entry;
+  VirtualBaseInfo *VBI;
 
-  Entry = new VirtualBaseInfo();
+  {
+    // Get or create a VBI for RD. Don't hold a reference to the DenseMap cell,
+    // as it may be modified and rehashed under us.
+    VirtualBaseInfo *&Entry = VBaseInfo[RD];
+    if (Entry)
+      return Entry;
+    Entry = VBI = new VirtualBaseInfo();
+  }
 
-  VBTableBuilder(Context, RD).enumerateVBTables(Entry->VBTables);
+  computeVBTablePaths(RD, VBI->VBTables);
 
   // First, see if the Derived class shared the vbptr with a non-virtual base.
   const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
@@ -3623,22 +3562,22 @@ const VirtualBaseInfo *MicrosoftVTableContext::computeVBTableRelatedInformation(
     // virtual bases come first so that the layout is the same.
     const VirtualBaseInfo *BaseInfo =
         computeVBTableRelatedInformation(VBPtrBase);
-    Entry->VBTableIndices.insert(BaseInfo->VBTableIndices.begin(),
-                                 BaseInfo->VBTableIndices.end());
+    VBI->VBTableIndices.insert(BaseInfo->VBTableIndices.begin(),
+                               BaseInfo->VBTableIndices.end());
   }
 
   // New vbases are added to the end of the vbtable.
   // Skip the self entry and vbases visited in the non-virtual base, if any.
-  unsigned VBTableIndex = 1 + Entry->VBTableIndices.size();
+  unsigned VBTableIndex = 1 + VBI->VBTableIndices.size();
   for (CXXRecordDecl::base_class_const_iterator I = RD->vbases_begin(),
        E = RD->vbases_end(); I != E; ++I) {
     const CXXRecordDecl *CurVBase = I->getType()->getAsCXXRecordDecl();
-    if (!Entry->VBTableIndices.count(CurVBase))
-      Entry->VBTableIndices[CurVBase] = VBTableIndex++;
+    if (!VBI->VBTableIndices.count(CurVBase))
+      VBI->VBTableIndices[CurVBase] = VBTableIndex++;
   }
 
-  return Entry;
+  return VBI;
 }
 
 unsigned MicrosoftVTableContext::getVBTableIndex(const CXXRecordDecl *Derived,
diff --git a/clang/lib/CodeGen/MicrosoftCXXABI.cpp b/clang/lib/CodeGen/MicrosoftCXXABI.cpp
index 5e06413f1a2..fc116925895 100644
--- a/clang/lib/CodeGen/MicrosoftCXXABI.cpp
+++ b/clang/lib/CodeGen/MicrosoftCXXABI.cpp
@@ -564,19 +564,22 @@ void MicrosoftCXXABI::EmitVBPtrStores(CodeGenFunction &CGF,
                                       const CXXRecordDecl *RD) {
   llvm::Value *ThisInt8Ptr =
     CGF.Builder.CreateBitCast(getThisValue(CGF), CGM.Int8PtrTy, "this.int8");
+  const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
 
   const VBTableGlobals &VBGlobals = enumerateVBTables(RD);
   for (unsigned I = 0, E = VBGlobals.VBTables->size(); I != E; ++I) {
-    const VBTableInfo &VBT = (*VBGlobals.VBTables)[I];
+    const VBTableInfo *VBT = (*VBGlobals.VBTables)[I];
     llvm::GlobalVariable *GV = VBGlobals.Globals[I];
     const ASTRecordLayout &SubobjectLayout =
-      CGM.getContext().getASTRecordLayout(VBT.VBPtrSubobject.getBase());
-    uint64_t Offs = (VBT.VBPtrSubobject.getBaseOffset() +
-                     SubobjectLayout.getVBPtrOffset()).getQuantity();
+      CGM.getContext().getASTRecordLayout(VBT->BaseWithVBPtr);
+    CharUnits Offs = VBT->NonVirtualOffset;
+    Offs += SubobjectLayout.getVBPtrOffset();
+    if (VBT->getVBaseWithVBPtr())
+      Offs += Layout.getVBaseClassOffset(VBT->getVBaseWithVBPtr());
     llvm::Value *VBPtr =
-        CGF.Builder.CreateConstInBoundsGEP1_64(ThisInt8Ptr, Offs);
+        CGF.Builder.CreateConstInBoundsGEP1_64(ThisInt8Ptr, Offs.getQuantity());
     VBPtr = CGF.Builder.CreateBitCast(VBPtr, GV->getType()->getPointerTo(0),
-                                      "vbptr." + VBT.ReusingBase->getName());
+                                      "vbptr." + VBT->ReusingBase->getName());
     CGF.Builder.CreateStore(GV, VBPtr);
   }
 }
@@ -1019,7 +1022,7 @@ MicrosoftCXXABI::enumerateVBTables(const CXXRecordDecl *RD) {
   for (VBTableVector::const_iterator I = VBGlobals.VBTables->begin(),
                                      E = VBGlobals.VBTables->end();
        I != E; ++I) {
-    VBGlobals.Globals.push_back(getAddrOfVBTable(*I, RD, Linkage));
+    VBGlobals.Globals.push_back(getAddrOfVBTable(**I, RD, Linkage));
   }
 
   return VBGlobals;
@@ -1064,9 +1067,9 @@ MicrosoftCXXABI::EmitVirtualMemPtrThunk(const CXXMethodDecl *MD,
 void MicrosoftCXXABI::emitVirtualInheritanceTables(const CXXRecordDecl *RD) {
   const VBTableGlobals &VBGlobals = enumerateVBTables(RD);
   for (unsigned I = 0, E = VBGlobals.VBTables->size(); I != E; ++I) {
-    const VBTableInfo &VBT = (*VBGlobals.VBTables)[I];
+    const VBTableInfo *VBT = (*VBGlobals.VBTables)[I];
     llvm::GlobalVariable *GV = VBGlobals.Globals[I];
-    emitVBTableDefinition(VBT, RD, GV);
+    emitVBTableDefinition(*VBT, RD, GV);
   }
 }
 
@@ -1102,7 +1105,7 @@ void MicrosoftCXXABI::emitVBTableDefinition(const VBTableInfo &VBT,
          "should only emit vbtables for classes with vbtables");
 
   const ASTRecordLayout &BaseLayout =
-      CGM.getContext().getASTRecordLayout(VBT.VBPtrSubobject.getBase());
+      CGM.getContext().getASTRecordLayout(VBT.BaseWithVBPtr);
   const ASTRecordLayout &DerivedLayout =
     CGM.getContext().getASTRecordLayout(RD);
 
@@ -1119,8 +1122,14 @@ void MicrosoftCXXABI::emitVBTableDefinition(const VBTableInfo &VBT,
     const CXXRecordDecl *VBase = I->getType()->getAsCXXRecordDecl();
     CharUnits Offset = DerivedLayout.getVBaseClassOffset(VBase);
     assert(!Offset.isNegative());
+
     // Make it relative to the subobject vbptr.
-    Offset -= VBT.VBPtrSubobject.getBaseOffset() + VBPtrOffset;
+    CharUnits CompleteVBPtrOffset = VBT.NonVirtualOffset + VBPtrOffset;
+    if (VBT.getVBaseWithVBPtr())
+      CompleteVBPtrOffset +=
+          DerivedLayout.getVBaseClassOffset(VBT.getVBaseWithVBPtr());
+    Offset -= CompleteVBPtrOffset;
+
     unsigned VBIndex = Context.getVBTableIndex(ReusingBase, VBase);
     assert(Offsets[VBIndex] == 0 && "The same vbindex seen twice?");
     Offsets[VBIndex] = llvm::ConstantInt::get(CGM.IntTy, Offset.getQuantity());
diff --git a/clang/test/CodeGenCXX/microsoft-abi-vbtables.cpp b/clang/test/CodeGenCXX/microsoft-abi-vbtables.cpp
index 6de556b1d81..79e32a84efd 100644
--- a/clang/test/CodeGenCXX/microsoft-abi-vbtables.cpp
+++ b/clang/test/CodeGenCXX/microsoft-abi-vbtables.cpp
@@ -477,3 +477,44 @@ F f;
 // CHECK-DAG: @"\01??_8F@Test26@@7BD@1@@" = linkonce_odr unnamed_addr constant [4 x i32] [i32 0, i32 16, i32 12, i32 20]
 // CHECK-DAG: @"\01??_8F@Test26@@7BE@1@@" = linkonce_odr unnamed_addr constant [2 x i32] [i32 -4, i32 28]
 }
+
+namespace Test27 {
+// PR17748
+struct A {};
+struct B : virtual A {};
+struct C : virtual B {};
+struct D : C, B {};
+struct E : D {};
+struct F : C, E {};
+struct G : F, D, C, B {};
+G x;
+
+// CHECK-DAG: @"\01??_8G@Test27@@7BB@1@@" =
+// CHECK-DAG: @"\01??_8G@Test27@@7BB@1@F@1@@" =
+// CHECK-DAG: @"\01??_8G@Test27@@7BC@1@@" =
+// CHECK-DAG: @"\01??_8G@Test27@@7BC@1@D@1@@" =
+// CHECK-DAG: @"\01??_8G@Test27@@7BC@1@E@1@@" =
+// CHECK-DAG: @"\01??_8G@Test27@@7BC@1@F@1@@" =
+// CHECK-DAG: @"\01??_8G@Test27@@7BD@1@@" =
+// CHECK-DAG: @"\01??_8G@Test27@@7BF@1@@" =
+}
+
+namespace Test28 {
+// PR17748
+struct A {};
+struct B : virtual A {};
+struct C : virtual B {};
+struct D : C, B {};
+struct E : C, D {};
+struct F : virtual E, virtual D, virtual C {};
+F x;
+
+// CHECK-DAG: @"\01??_8F@Test28@@7B01@@" =
+// CHECK-DAG: @"\01??_8F@Test28@@7BB@1@@" =
+// CHECK-DAG: @"\01??_8F@Test28@@7BC@1@@" =
+// CHECK-DAG: @"\01??_8F@Test28@@7BC@1@D@1@@" =
+// CHECK-DAG: @"\01??_8F@Test28@@7BC@1@D@1@E@1@@" =
+// CHECK-DAG: @"\01??_8F@Test28@@7BC@1@E@1@@" =
+// CHECK-DAG: @"\01??_8F@Test28@@7BD@1@@" =
+// CHECK-DAG: @"\01??_8F@Test28@@7BE@1@@" =
+}
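
To see the naming scheme that the new computeVBTablePaths()/rebucketPaths() code implements, the standalone sketch below replays the "sort paths into buckets, extend every ambiguous bucket" loop on plain strings instead of CXXRecordDecl pointers. It is illustrative only and not part of the patch: the Path struct, the sample hierarchy, and the rule of extending a path with the next link of its recorded base chain are simplifications of NextBaseToMangle and the hierarchy walk in the diff above (assumes C++14).

// Standalone sketch (not part of the patch): the "bucket and extend" idea from
// rebucketPaths(), over strings. Each Path holds the bases mangled so far and
// the full base-to-most-derived chain it came from; ambiguous names are
// extended by one more base per round until all names are distinct.
#include <algorithm>
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

struct Path {
  std::vector<std::string> Mangled;   // bases mangled so far, e.g. {"C"}
  std::vector<std::string> FullPath;  // whole chain, most-derived class last
  std::size_t NextIndex = 1;          // next element of FullPath to append
};

// Extend an ambiguous path by one more base, if any is left.
static bool extendPath(Path &P) {
  if (P.NextIndex >= P.FullPath.size())
    return false;
  P.Mangled.push_back(P.FullPath[P.NextIndex++]);
  return true;
}

// One round of bucketing: sort by mangled name, extend every bucket that still
// holds more than one path. Returns true if anything changed.
static bool rebucketPaths(std::vector<Path> &Paths) {
  std::vector<Path *> Sorted;
  for (Path &P : Paths)
    Sorted.push_back(&P);
  std::sort(Sorted.begin(), Sorted.end(),
            [](const Path *L, const Path *R) { return L->Mangled < R->Mangled; });
  bool Changed = false;
  for (std::size_t I = 0, E = Sorted.size(); I != E;) {
    std::size_t BucketStart = I;
    do {
      ++I;
    } while (I != E && Sorted[I]->Mangled == Sorted[BucketStart]->Mangled);
    if (I - BucketStart > 1)
      for (std::size_t II = BucketStart; II != I; ++II)
        Changed |= extendPath(*Sorted[II]);
  }
  return Changed;
}

int main() {
  // Hypothetical hierarchy in the spirit of Test27: three subobjects of C
  // reach G through D, E and F, so the name "C" alone is ambiguous.
  std::vector<Path> Paths = {
      {{"C"}, {"C", "D", "G"}},
      {{"C"}, {"C", "E", "G"}},
      {{"C"}, {"C", "F", "G"}},
      {{"B"}, {"B", "G"}},
  };
  while (rebucketPaths(Paths))
    ;
  for (const Path &P : Paths) {
    for (const std::string &S : P.Mangled)
      std::cout << S << '@';
    std::cout << '\n';  // prints C@D@, C@E@, C@F@, B@
  }
}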
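The offset arithmetic on the CodeGen side (EmitVBPtrStores() and emitVBTableDefinition()) reduces to a few additions and one subtraction. The numbers below are hypothetical layout offsets, not taken from the patch or any real record layout; they only show how the complete-object position of a vbptr is assembled from an optional containing vbase, VBTableInfo::NonVirtualOffset, and the vbptr offset inside BaseWithVBPtr, and how each vbtable entry is a vbase offset made relative to that vbptr (the self entry at index 0 is skipped here).

// Hypothetical byte offsets for one vbtable of some class; illustrative only.
#include <cstdint>
#include <iostream>
#include <vector>

struct VBTableDesc {
  int64_t ContainingVBaseOffset; // offset of getVBaseWithVBPtr(), 0 if none
  int64_t NonVirtualOffset;      // VBTableInfo::NonVirtualOffset
  int64_t VBPtrOffsetInBase;     // vbptr offset inside BaseWithVBPtr
};

// Where the vbptr lives in the complete object (cf. EmitVBPtrStores).
int64_t completeVBPtrOffset(const VBTableDesc &T) {
  return T.ContainingVBaseOffset + T.NonVirtualOffset + T.VBPtrOffsetInBase;
}

// Vbase entries of the vbtable: each vbase offset made relative to the vbptr
// (cf. emitVBTableDefinition, Offset -= CompleteVBPtrOffset).
std::vector<int64_t> vbtableEntries(const VBTableDesc &T,
                                    const std::vector<int64_t> &VBaseOffsets) {
  std::vector<int64_t> Entries;
  for (int64_t VBaseOffset : VBaseOffsets)
    Entries.push_back(VBaseOffset - completeVBPtrOffset(T));
  return Entries;
}

int main() {
  VBTableDesc T = {/*ContainingVBaseOffset=*/0, /*NonVirtualOffset=*/8,
                   /*VBPtrOffsetInBase=*/0};
  for (int64_t E : vbtableEntries(T, {16, 24}))
    std::cout << E << '\n'; // prints 8 and 16
}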