[cfe-commits] r79156 - in /cfe/trunk: lib/AST/RecordLayoutBuilder.cpp lib/AST/RecordLayoutBuilder.h lib/CodeGen/CGCXX.cpp lib/CodeGen/CodeGenFunction.h test/CodeGenCXX/virt.cpp

Mike Stump mrs at apple.com
Sat Aug 15 18:46:26 PDT 2009


Author: mrs
Date: Sat Aug 15 20:46:26 2009
New Revision: 79156

URL: http://llvm.org/viewvc/llvm-project?rev=79156&view=rev
Log:
Cleanups and fixups for calculating the virtual base offsets.  WIP.

Modified:
    cfe/trunk/lib/AST/RecordLayoutBuilder.cpp
    cfe/trunk/lib/AST/RecordLayoutBuilder.h
    cfe/trunk/lib/CodeGen/CGCXX.cpp
    cfe/trunk/lib/CodeGen/CodeGenFunction.h
    cfe/trunk/test/CodeGenCXX/virt.cpp

Modified: cfe/trunk/lib/AST/RecordLayoutBuilder.cpp
URL: http://llvm.org/viewvc/llvm-project/cfe/trunk/lib/AST/RecordLayoutBuilder.cpp?rev=79156&r1=79155&r2=79156&view=diff

==============================================================================
--- cfe/trunk/lib/AST/RecordLayoutBuilder.cpp (original)
+++ cfe/trunk/lib/AST/RecordLayoutBuilder.cpp Sat Aug 15 20:46:26 2009
@@ -168,18 +168,41 @@
 }
 
 void ASTRecordLayoutBuilder::LayoutVirtualBases(const CXXRecordDecl *RD,
+                                                int64_t Offset,
+                                 llvm::SmallSet<const CXXRecordDecl*, 32> &mark,
                     llvm::SmallSet<const CXXRecordDecl*, 32> &IndirectPrimary) {
   for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
          e = RD->bases_end(); i != e; ++i) {
     const CXXRecordDecl *Base = 
       cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
-    if (i->isVirtual() && !IndirectPrimary.count(Base)) {
-      // Mark it so we don't output it twice.
-      IndirectPrimary.insert(Base);
-      LayoutVirtualBase(Base);
+#if 0
+    const ASTRecordLayout &L = Ctx.getASTRecordLayout(Base);
+    const CXXRecordDecl *PB = L.getPrimaryBase();
+    if (PB && L.getPrimaryBaseWasVirtual()
+        && IndirectPrimary.count(PB)) {
+      int64_t BaseOffset;
+      // FIXME: calculate this.
+      BaseOffset = (1<<63) | (1<<31);
+      VBases.push_back(PB);
+      VBaseOffsets.push_back(BaseOffset);
+    }
+#endif
+    if (i->isVirtual()) {
+      // Mark it so we don't lay it out twice.
+      if (mark.count(Base))
+        continue;
+      if (IndirectPrimary.count(Base)) {
+        int64_t BaseOffset;
+        // FIXME: audit
+        BaseOffset = Offset;
+        // BaseOffset = (1<<63) | (1<<31);
+        VBases.push_back(Base);
+        VBaseOffsets.push_back(BaseOffset);
+      } else
+        LayoutVirtualBase(Base);
     }
     if (Base->getNumVBases())
-      LayoutVirtualBases(Base, IndirectPrimary);
+      LayoutVirtualBases(Base, Offset, mark, IndirectPrimary);
   }
 }
 
@@ -195,7 +218,7 @@
   // Round up the current record size to the base's alignment boundary.
   Size = (Size + (BaseAlign-1)) & ~(BaseAlign-1);
 
-    // Add base class offsets.
+  // Add base class offsets.
   if (IsVirtualBase) {
     VBases.push_back(RD);
     VBaseOffsets.push_back(Size);
@@ -204,6 +227,20 @@
     BaseOffsets.push_back(Size);
   }
 
+  // And now add offsets for all our primary virtual bases as well, so
+  // they all have offsets.
+  const ASTRecordLayout *L = &BaseInfo;
+  const CXXRecordDecl *PB = L->getPrimaryBase();
+  while (PB) {
+    if (L->getPrimaryBaseWasVirtual()) {
+      VBases.push_back(PB);
+      VBaseOffsets.push_back(Size);
+    }
+    PB = L->getPrimaryBase();
+    if (PB)
+      L = &Ctx.getASTRecordLayout(PB);
+  }
+
   // Reserve space for this base.
   Size += BaseSize;
   
@@ -228,7 +265,7 @@
 
   llvm::SmallSet<const CXXRecordDecl*, 32> IndirectPrimary;
 
-  // If this is a C++ class, lay out the nonvirtual bases.
+  // If this is a C++ class, lay out the vtable and the non-virtual bases.
   const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D);
   if (RD) {
     LayoutVtable(RD, IndirectPrimary);
@@ -246,8 +283,10 @@
   NonVirtualSize = Size;
   NonVirtualAlignment = Alignment;
 
-  if (RD)
-    LayoutVirtualBases(RD, IndirectPrimary);
+  if (RD) {
+    llvm::SmallSet<const CXXRecordDecl*, 32> mark;
+    LayoutVirtualBases(RD, 0, mark, IndirectPrimary);
+  }
 
   // Finally, round the size of the total struct up to the alignment of the
   // struct itself.

Modified: cfe/trunk/lib/AST/RecordLayoutBuilder.h
URL: http://llvm.org/viewvc/llvm-project/cfe/trunk/lib/AST/RecordLayoutBuilder.h?rev=79156&r1=79155&r2=79156&view=diff

==============================================================================
--- cfe/trunk/lib/AST/RecordLayoutBuilder.h (original)
+++ cfe/trunk/lib/AST/RecordLayoutBuilder.h Sat Aug 15 20:46:26 2009
@@ -74,6 +74,8 @@
   void LayoutBaseNonVirtually(const CXXRecordDecl *RD, bool IsVBase);
   void LayoutVirtualBase(const CXXRecordDecl *RD);
   void LayoutVirtualBases(const CXXRecordDecl *RD,
+                          int64_t Offset,
+                                 llvm::SmallSet<const CXXRecordDecl*, 32> &mark,
                      llvm::SmallSet<const CXXRecordDecl*, 32> &IndirectPrimary);
   
   /// FinishLayout - Finalize record layout. Adjust record size based on the

Modified: cfe/trunk/lib/CodeGen/CGCXX.cpp
URL: http://llvm.org/viewvc/llvm-project/cfe/trunk/lib/CodeGen/CGCXX.cpp?rev=79156&r1=79155&r2=79156&view=diff

==============================================================================
--- cfe/trunk/lib/CodeGen/CGCXX.cpp (original)
+++ cfe/trunk/lib/CodeGen/CGCXX.cpp Sat Aug 15 20:46:26 2009
@@ -680,6 +680,7 @@
 }
 
 void CodeGenFunction::GenerateVtableForVBases(const CXXRecordDecl *RD,
+                                              const CXXRecordDecl *Class,
                                               llvm::Constant *rtti,
                                          std::vector<llvm::Constant *> &methods,
                    llvm::SmallSet<const CXXRecordDecl *, 32> &IndirectPrimary) {
@@ -690,19 +691,40 @@
     if (i->isVirtual() && !IndirectPrimary.count(Base)) {
       // Mark it so we don't output it twice.
       IndirectPrimary.insert(Base);
-      GenerateVtableForBase(Base, RD, rtti, methods, false, true,
+      GenerateVtableForBase(Base, true, 0, Class, rtti, methods, true,
                             IndirectPrimary);
     }
     if (Base->getNumVBases())
-      GenerateVtableForVBases(Base, rtti, methods, IndirectPrimary);
+      GenerateVtableForVBases(Base, Class, rtti, methods, IndirectPrimary);
+  }
+}
+
+void CodeGenFunction::GenerateVBaseOffsets(
+  std::vector<llvm::Constant *> &methods, const CXXRecordDecl *RD,
+  llvm::SmallSet<const CXXRecordDecl *, 32> &SeenVBase,
+  uint64_t Offset, const ASTRecordLayout &BLayout, llvm::Type *Ptr8Ty) {
+  for (CXXRecordDecl::base_class_const_iterator i =RD->bases_begin(),
+         e = RD->bases_end(); i != e; ++i) {
+    const CXXRecordDecl *Base = 
+      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
+    if (i->isVirtual() && !SeenVBase.count(Base)) {
+      SeenVBase.insert(Base);
+      int64_t BaseOffset = Offset/8 + BLayout.getVBaseClassOffset(Base) / 8;
+      llvm::Constant *m;
+      m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), BaseOffset);
+      m = llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
+      methods.push_back(m);
+    }
+    GenerateVBaseOffsets(methods, Base, SeenVBase, Offset, BLayout, Ptr8Ty);
   }
 }
 
 void CodeGenFunction::GenerateVtableForBase(const CXXRecordDecl *RD,
+                                            bool forPrimary,
+                                            int64_t Offset,
                                             const CXXRecordDecl *Class,
                                             llvm::Constant *rtti,
                                          std::vector<llvm::Constant *> &methods,
-                                            bool isPrimary,
                                             bool ForVirtualBase,
                    llvm::SmallSet<const CXXRecordDecl *, 32> &IndirectPrimary) {
   llvm::Type *Ptr8Ty;
@@ -712,69 +734,70 @@
   if (RD && !RD->isDynamicClass())
     return;
 
-  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Class);
+  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
+  const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase(); 
+  const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();
 
-  if (isPrimary) {
-    // The virtual base offsets come first...
-    // FIXME: audit
-    for (CXXRecordDecl::reverse_base_class_const_iterator i
-           = Class->bases_rbegin(),
-           e = Class->bases_rend(); i != e; ++i) {
-      if (!i->isVirtual())
-        continue;
-      const CXXRecordDecl *Base = 
-        cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
-      int64_t BaseOffset = Layout.getVBaseClassOffset(Base) / 8;
-      llvm::Constant *m;
-      m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), BaseOffset);
-      m = llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
-      methods.push_back(m);
-    }
+  // The virtual base offsets come first...
+  // FIXME: Audit, is this right?
+  if (forPrimary || !PrimaryBaseWasVirtual) {
+    llvm::SmallSet<const CXXRecordDecl *, 32> SeenVBase;
+    std::vector<llvm::Constant *> offsets;
+    GenerateVBaseOffsets(offsets, RD, SeenVBase, Offset, Layout, Ptr8Ty);
+    for (std::vector<llvm::Constant *>::reverse_iterator i = offsets.rbegin(),
+           e = offsets.rend(); i != e; ++i)
+      methods.push_back(*i);
   }
   
-  // then comes the vcall offsets for all our functions...
-  if (isPrimary && ForVirtualBase)
-    GenerateVcalls(methods, Class, Ptr8Ty);
-
-  bool TopPrimary = true;
-  // Primary tables are composed from the chain of primaries.
-  if (isPrimary) {
-    const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase(); 
-    const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();
-    if (PrimaryBase) {
-      if (PrimaryBaseWasVirtual)
-        IndirectPrimary.insert(PrimaryBase);
-      TopPrimary = false;
-      GenerateVtableForBase(0, PrimaryBase, rtti, methods, true,
-                            PrimaryBaseWasVirtual, IndirectPrimary);
-    }
+  if (forPrimary || ForVirtualBase) {
+    // then comes the vcall offsets for all our functions...
+    GenerateVcalls(methods, RD, Ptr8Ty);
+  }
+
+  bool Top = true;
+
+  // vtables are composed from the chain of primaries.
+  if (PrimaryBase) {
+    if (PrimaryBaseWasVirtual)
+      IndirectPrimary.insert(PrimaryBase);
+    Top = false;
+    GenerateVtableForBase(PrimaryBase, true, Offset, Class, rtti, methods,
+                          PrimaryBaseWasVirtual, IndirectPrimary);
   }
+
   // then come the vcall offsets for all our virtual bases.
-  if (!isPrimary && RD && ForVirtualBase)
+  if (!1 && ForVirtualBase)
     GenerateVcalls(methods, RD, Ptr8Ty);
 
-  if (TopPrimary) {
-    if (RD) {
-      int64_t BaseOffset;
-      if (ForVirtualBase)
-        BaseOffset = -(Layout.getVBaseClassOffset(RD) / 8);
-      else
-        BaseOffset = -(Layout.getBaseClassOffset(RD) / 8);
-      m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), BaseOffset);
-      m = llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
-    }
+  if (Top) {
+    int64_t BaseOffset;
+    if (ForVirtualBase) {
+      const ASTRecordLayout &BLayout = getContext().getASTRecordLayout(Class);
+      BaseOffset = -(BLayout.getVBaseClassOffset(RD) / 8);
+    } else
+      BaseOffset = -Offset/8;
+    m = llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext), BaseOffset);
+    m = llvm::ConstantExpr::getIntToPtr(m, Ptr8Ty);
     methods.push_back(m);
     methods.push_back(rtti);
   }
 
-  if (!isPrimary) {
-    if (RD)
-      GenerateMethods(methods, RD, Ptr8Ty);
-    return;
-  }
-
   // And add the virtuals for the class to the primary vtable.
-  GenerateMethods(methods, Class, Ptr8Ty);
+  GenerateMethods(methods, RD, Ptr8Ty);
+
+  // and then the non-virtual bases.
+  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
+         e = RD->bases_end(); i != e; ++i) {
+    if (i->isVirtual())
+      continue;
+    const CXXRecordDecl *Base = 
+      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
+    if (Base != PrimaryBase || PrimaryBaseWasVirtual) {
+      uint64_t o = Offset + Layout.getBaseClassOffset(Base);
+      GenerateVtableForBase(Base, true, o, Class, rtti, methods, false,
+                            IndirectPrimary);
+    }
+  }
 }
 
 llvm::Value *CodeGenFunction::GenerateVtable(const CXXRecordDecl *RD) {
@@ -787,36 +810,20 @@
   llvm::GlobalVariable::LinkageTypes linktype;
   linktype = llvm::GlobalValue::WeakAnyLinkage;
   std::vector<llvm::Constant *> methods;
-  llvm::Type *Ptr8Ty = llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext), 0);
+  llvm::Type *Ptr8Ty=llvm::PointerType::get(llvm::Type::getInt8Ty(VMContext),0);
   int64_t Offset = 0;
   llvm::Constant *rtti = GenerateRtti(RD);
 
   Offset += LLVMPointerWidth;
   Offset += LLVMPointerWidth;
 
-  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
-  const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
-  const bool PrimaryBaseWasVirtual = Layout.getPrimaryBaseWasVirtual();
   llvm::SmallSet<const CXXRecordDecl *, 32> IndirectPrimary;
 
-  // The primary base comes first.
-  GenerateVtableForBase(PrimaryBase, RD, rtti, methods, true,
-                        PrimaryBaseWasVirtual, IndirectPrimary);
-
-  // Then come the non-virtual bases.
-  for (CXXRecordDecl::base_class_const_iterator i = RD->bases_begin(),
-         e = RD->bases_end(); i != e; ++i) {
-    if (i->isVirtual())
-      continue;
-    const CXXRecordDecl *Base = 
-      cast<CXXRecordDecl>(i->getType()->getAs<RecordType>()->getDecl());
-    if (Base != PrimaryBase || PrimaryBaseWasVirtual)
-      GenerateVtableForBase(Base, RD, rtti, methods, false, false,
-                            IndirectPrimary);
-  }
+  // First comes the vtables for all the non-virtual bases...
+  GenerateVtableForBase(RD, true, 0, RD, rtti, methods, false, IndirectPrimary);
 
-  // Then come the vtables for all the virtual bases.
-  GenerateVtableForVBases(RD, rtti, methods, IndirectPrimary);
+  // then the vtables for all the virtual bases.
+  GenerateVtableForVBases(RD, RD, rtti, methods, IndirectPrimary);
 
   llvm::Constant *C;
   llvm::ArrayType *type = llvm::ArrayType::get(Ptr8Ty, methods.size());
@@ -825,7 +832,7 @@
                                                  linktype, C, Name);
   vtable = Builder.CreateBitCast(vtable, Ptr8Ty);
   vtable = Builder.CreateGEP(vtable,
-                             llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext),
+                       llvm::ConstantInt::get(llvm::Type::getInt64Ty(VMContext),
                                                     Offset/8));
   return vtable;
 }

Modified: cfe/trunk/lib/CodeGen/CodeGenFunction.h
URL: http://llvm.org/viewvc/llvm-project/cfe/trunk/lib/CodeGen/CodeGenFunction.h?rev=79156&r1=79155&r2=79156&view=diff

==============================================================================
--- cfe/trunk/lib/CodeGen/CodeGenFunction.h (original)
+++ cfe/trunk/lib/CodeGen/CodeGenFunction.h Sat Aug 15 20:46:26 2009
@@ -362,19 +362,26 @@
   void FinishFunction(SourceLocation EndLoc=SourceLocation());
 
   llvm::Constant *GenerateRtti(const CXXRecordDecl *RD);
+  void GenerateVBaseOffsets(std::vector<llvm::Constant *> &methods,
+                            const CXXRecordDecl *RD, 
+                           llvm::SmallSet<const CXXRecordDecl *, 32> &SeenVBase,
+                            uint64_t Offset,
+                            const ASTRecordLayout &Layout, llvm::Type *Ptr8Ty);
   void GenerateVcalls(std::vector<llvm::Constant *> &methods,
                       const CXXRecordDecl *RD, llvm::Type *Ptr8Ty);
   void GenerateMethods(std::vector<llvm::Constant *> &methods,
                        const CXXRecordDecl *RD, llvm::Type *Ptr8Ty);
-void GenerateVtableForVBases(const CXXRecordDecl *RD,
-                             llvm::Constant *rtti,
-                             std::vector<llvm::Constant *> &methods,
+  void GenerateVtableForVBases(const CXXRecordDecl *RD,
+                               const CXXRecordDecl *Class,
+                               llvm::Constant *rtti,
+                               std::vector<llvm::Constant *> &methods,
                     llvm::SmallSet<const CXXRecordDecl *, 32> &IndirectPrimary);
   void GenerateVtableForBase(const CXXRecordDecl *RD,
+                             bool ForPrimary,
+                             int64_t Offset,
                              const CXXRecordDecl *Class,
                              llvm::Constant *rtti,
                              std::vector<llvm::Constant *> &methods,
-                             bool isPrimary,
                              bool ForVirtualBase,
                     llvm::SmallSet<const CXXRecordDecl *, 32> &IndirectPrimary);
   llvm::Value *GenerateVtable(const CXXRecordDecl *RD);

Modified: cfe/trunk/test/CodeGenCXX/virt.cpp
URL: http://llvm.org/viewvc/llvm-project/cfe/trunk/test/CodeGenCXX/virt.cpp?rev=79156&r1=79155&r2=79156&view=diff

==============================================================================
--- cfe/trunk/test/CodeGenCXX/virt.cpp (original)
+++ cfe/trunk/test/CodeGenCXX/virt.cpp Sat Aug 15 20:46:26 2009
@@ -204,12 +204,12 @@
 };
 
 // CHECK-LP32:__ZTV7test5_D:
-// CHECK-LP32 .long 16
-// CHECK-LP32 .long 12
-// CHECK-LP32 .long 8
+// CHECK-LP32: .long 16
+// CHECK-LP32: .long 12
+// CHECK-LP32: .long 8
 // CHECK-LP32 .long 8
 // CHECK-LP32 .long 8
-// CHECK-LP32 .long 4
+// CHECK-LP32: .long 4
 // CHECK-LP32 .long 4
 // CHECK-LP32 .long 4
 // CHECK-LP32: .space 4
@@ -224,44 +224,44 @@
 // CHECK-LP32: .long __ZN8test5_B26funcB2Ev
 // CHECK-LP32: .long __ZN8test5_B16funcB1Ev
 // CHECK-LP32: .long __ZN7test5_D5funcDEv
-// CHECK-LP32 .space 4
-// CHECK-LP32 .space 4
-// CHECK-LP32 .space 4
-// CHECK-LP32 .space 4
 // CHECK-LP32: .space 4
-// CHECK-LP32: .long 4294967292
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32 .long 4294967292
 // CHECK-LP32: .long __ZTI7test5_D
 // CHECK-LP32: .long __ZN9test5_B237funcB23Ev
-// CHECK-LP32 .long __ZN9test5_B227funcB22Ev
-// CHECK-LP32 .long __ZN9test5_B217funcB21Ev
-// CHECK-LP32 .space 4
-// CHECK-LP32 .long 8
-// CHECK-LP32 .space 4
-// CHECK-LP32 .space 4
-// CHECK-LP32 .long 4
+// CHECK-LP32: .long __ZN9test5_B227funcB22Ev
+// CHECK-LP32: .long __ZN9test5_B217funcB21Ev
 // CHECK-LP32 .space 4
+// CHECK-LP32: .long 8
 // CHECK-LP32: .space 4
-// CHECK-LP32: .long 4294967288
-// CHECK-LP32 .long __ZTI7test5_D
-// CHECK-LP32 .long __ZN9test5_B337funcB33Ev
-// CHECK-LP32 .long __ZN9test5_B327funcB32Ev
+// CHECK-LP32: .space 4
+// CHECK-LP32: .long 4
+// CHECK-LP32: .space 4
+// CHECK-LP32: .space 4
+// CHECK-LP32 .long 4294967288
+// CHECK-LP32: .long __ZTI7test5_D
+// CHECK-LP32: .long __ZN9test5_B337funcB33Ev
+// CHECK-LP32: .long __ZN9test5_B327funcB32Ev
 // CHECK-LP32: .long __ZN9test5_B317funcB31Ev
 // CHECK-LP32: .space 4
-// CHECK-LP32 .long -12
+// CHECK-LP32: .long 4294967284
 // CHECK-LP32: .long __ZTI7test5_D
 // CHECK-LP32: .long __ZN4B2328funcB232Ev
 // CHECK-LP32: .space 4
-// CHECK-LP32 .long -16
+// CHECK-LP32:.long 4294967280
 // CHECK-LP32: .long __ZTI7test5_D
 // CHECK-LP32: .long __ZN4B2318funcB231Ev
 
 // CHECK-LP64:__ZTV7test5_D:
-// CHECK-LP64 .quad 32
-// CHECK-LP64 .quad 24
-// CHECK-LP64 .quad 16
+// CHECK-LP64: .quad 32
+// CHECK-LP64: .quad 24
+// CHECK-LP64: .quad 16
 // CHECK-LP64 .quad 16
 // CHECK-LP64 .quad 16
-// CHECK-LP64 .quad 8
+// CHECK-LP64: .quad 8
 // CHECK-LP64 .quad 8
 // CHECK-LP64 .quad 8
 // CHECK-LP64: .space 8
@@ -276,37 +276,116 @@
 // CHECK-LP64: .quad __ZN8test5_B26funcB2Ev
 // CHECK-LP64: .quad __ZN8test5_B16funcB1Ev
 // CHECK-LP64: .quad __ZN7test5_D5funcDEv
-// CHECK-LP64 .space 8
-// CHECK-LP64 .space 8
-// CHECK-LP64 .space 8
-// CHECK-LP64 .space 8
 // CHECK-LP64: .space 8
-// CHECK-LP64:.quad 18446744073709551608
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64 .quad 18446744073709551608
 // CHECK-LP64: .quad __ZTI7test5_D
 // CHECK-LP64: .quad __ZN9test5_B237funcB23Ev
-// CHECK-LP64 .quad __ZN9test5_B227funcB22Ev
-// CHECK-LP64 .quad __ZN9test5_B217funcB21Ev
-// CHECK-LP64 .space 8
-// CHECK-LP64 .quad 16
-// CHECK-LP64 .space 8
-// CHECK-LP64 .space 8
-// CHECK-LP64 .quad 8
+// CHECK-LP64: .quad __ZN9test5_B227funcB22Ev
+// CHECK-LP64: .quad __ZN9test5_B217funcB21Ev
 // CHECK-LP64 .space 8
+// CHECK-LP64: .quad 16
 // CHECK-LP64: .space 8
-// CHECK-LP64: .quad 18446744073709551600
-// CHECK-LP64 .quad __ZTI7test5_D
-// CHECK-LP64 .quad __ZN9test5_B337funcB33Ev
-// CHECK-LP64 .quad __ZN9test5_B327funcB32Ev
+// CHECK-LP64: .space 8
+// CHECK-LP64: .quad 8
+// CHECK-LP64: .space 8
+// CHECK-LP64: .space 8
+// CHECK-LP64 .quad 18446744073709551600
+// CHECK-LP64: .quad __ZTI7test5_D
+// CHECK-LP64: .quad __ZN9test5_B337funcB33Ev
+// CHECK-LP64: .quad __ZN9test5_B327funcB32Ev
 // CHECK-LP64: .quad __ZN9test5_B317funcB31Ev
 // CHECK-LP64: .space 8
-// CHECK-LP64 .quad 18446744073709551592
+// CHECK-LP64: .quad 18446744073709551592
 // CHECK-LP64: .quad __ZTI7test5_D
 // CHECK-LP64: .quad __ZN4B2328funcB232Ev
 // CHECK-LP64: .space 8
-// CHECK-LP64 .quad 18446744073709551584
+// CHECK-LP64:.quad 18446744073709551584
 // CHECK-LP64: .quad __ZTI7test5_D
 // CHECK-LP64: .quad __ZN4B2318funcB231Ev
 
+struct test8_B1 {
+  virtual void ftest8_B1() { }
+};
+struct test8_B2aa {
+  virtual void ftest8_B2aa() { }
+  int i;
+};
+struct test8_B2ab {
+  virtual void ftest8_B2ab() { }
+  int i;
+};
+struct test8_B2a : virtual test8_B2aa, virtual test8_B2ab {
+  virtual void ftest8_B2a() { }
+};
+struct test8_B2b {
+  virtual void ftest8_B2b() { }
+};
+struct test8_B2 : test8_B2a, test8_B2b {
+  virtual void ftest8_B2() { }
+};
+struct test8_B3 {
+  virtual void ftest8_B3() { }
+};
+class test8_D : test8_B1, test8_B2, test8_B3 {
+} d8;
+
+// CHECK-LP32:__ZTV7test8_D:
+// CHECK-LP32: .long 24
+// CHECK-LP32: .long 16
+// CHECK-LP32: .space 4
+// CHECK-LP32: .long __ZTI7test8_D
+// CHECK-LP32: .long __ZN8test8_B19ftest8_B1Ev
+// CHECK-LP32: .long 20
+// CHECK-LP32: .long 12
+// CHECK-LP32: .long 4294967292
+// CHECK-LP32: .long __ZTI7test8_D
+// CHECK-LP32: .long __ZN9test8_B2a10ftest8_B2aEv
+// CHECK-LP32: .long __ZN8test8_B29ftest8_B2Ev
+// CHECK-LP32: .long 4294967288
+// CHECK-LP32: .long __ZTI7test8_D
+// CHECK-LP32: .long __ZN9test8_B2b10ftest8_B2bEv
+// CHECK-LP32: .long 4294967284
+// CHECK-LP32: .long __ZTI7test8_D
+// CHECK-LP32: .long __ZN8test8_B39ftest8_B3Ev
+// CHECK-LP32: .space 4
+// CHECK-LP32: .long 4294967280
+// CHECK-LP32: .long __ZTI7test8_D
+// CHECK-LP32: .long __ZN10test8_B2aa11ftest8_B2aaEv
+// CHECK-LP32: .space 4
+// CHECK-LP32: .long 4294967272
+// CHECK-LP32: .long __ZTI7test8_D
+// CHECK-LP32: .long __ZN10test8_B2ab11ftest8_B2abEv
+
+// CHECK-LP64:__ZTV7test8_D:
+// CHECK-LP64: .quad 48
+// CHECK-LP64: .quad 32
+// CHECK-LP64: .space 8
+// CHECK-LP64: .quad __ZTI7test8_D
+// CHECK-LP64: .quad __ZN8test8_B19ftest8_B1Ev
+// CHECK-LP64: .quad 40
+// CHECK-LP64: .quad 24
+// CHECK-LP64: .quad 18446744073709551608
+// CHECK-LP64: .quad __ZTI7test8_D
+// CHECK-LP64: .quad __ZN9test8_B2a10ftest8_B2aEv
+// CHECK-LP64: .quad __ZN8test8_B29ftest8_B2Ev
+// CHECK-LP64: .quad 18446744073709551600
+// CHECK-LP64: .quad __ZTI7test8_D
+// CHECK-LP64: .quad __ZN9test8_B2b10ftest8_B2bEv
+// CHECK-LP64: .quad 18446744073709551592
+// CHECK-LP64: .quad __ZTI7test8_D
+// CHECK-LP64: .quad __ZN8test8_B39ftest8_B3Ev
+// CHECK-LP64: .space 8
+// CHECK-LP64: .quad 18446744073709551584
+// CHECK-LP64: .quad __ZTI7test8_D
+// CHECK-LP64: .quad __ZN10test8_B2aa11ftest8_B2aaEv
+// CHECK-LP64: .space 8
+// CHECK-LP64: .quad 18446744073709551568
+// CHECK-LP64: .quad __ZTI7test8_D
+// CHECK-LP64: .quad __ZN10test8_B2ab11ftest8_B2abEv
 
 
 





More information about the cfe-commits mailing list