RecordLayoutBuilder.cpp

00001 //=== RecordLayoutBuilder.cpp - Helper class for building record layouts ---==//
00002 //
00003 //                     The LLVM Compiler Infrastructure
00004 //
00005 // This file is distributed under the University of Illinois Open Source
00006 // License. See LICENSE.TXT for details.
00007 //
00008 //===----------------------------------------------------------------------===//
00009 
00010 #include "clang/AST/RecordLayout.h"
00011 #include "clang/AST/ASTContext.h"
00012 #include "clang/AST/Attr.h"
00013 #include "clang/AST/CXXInheritance.h"
00014 #include "clang/AST/Decl.h"
00015 #include "clang/AST/DeclCXX.h"
00016 #include "clang/AST/DeclObjC.h"
00017 #include "clang/AST/Expr.h"
00018 #include "clang/Basic/TargetInfo.h"
00019 #include "clang/Sema/SemaDiagnostic.h"
00020 #include "llvm/ADT/SmallSet.h"
00021 #include "llvm/Support/CrashRecoveryContext.h"
00022 #include "llvm/Support/Format.h"
00023 #include "llvm/Support/MathExtras.h"
00024 
00025 using namespace clang;
00026 
00027 namespace {
00028 
00029 /// BaseSubobjectInfo - Represents a single base subobject in a complete class.
00030 /// For a class hierarchy like
00031 ///
00032 /// class A { };
00033 /// class B : A { };
00034 /// class C : A, B { };
00035 ///
00036 /// The BaseSubobjectInfo graph for C will have three BaseSubobjectInfo
00037 /// instances, one for B and two for A.
00038 ///
00039 /// If a base is virtual, it will only have one BaseSubobjectInfo allocated.
00040 struct BaseSubobjectInfo {
00041   /// Class - The class for this base info.
00042   const CXXRecordDecl *Class;
00043 
00044   /// IsVirtual - Whether the BaseInfo represents a virtual base or not.
00045   bool IsVirtual;
00046 
00047   /// Bases - Information about the base subobjects.
00048   SmallVector<BaseSubobjectInfo*, 4> Bases;
00049 
00050   /// PrimaryVirtualBaseInfo - Holds the base info for the primary virtual base
00051   /// of this base info (if one exists).
00052   BaseSubobjectInfo *PrimaryVirtualBaseInfo;
00053 
00054   // FIXME: Document.
00055   const BaseSubobjectInfo *Derived;
00056 };
00057 
00058 /// EmptySubobjectMap - Keeps track of which empty subobjects exist at different
00059 /// offsets while laying out a C++ class.
00060 class EmptySubobjectMap {
00061   const ASTContext &Context;
00062   uint64_t CharWidth;
00063   
00064   /// Class - The class whose empty entries we're keeping track of.
00065   const CXXRecordDecl *Class;
00066 
00067   /// EmptyClassOffsets - A map from offsets to empty record decls.
00068   typedef llvm::TinyPtrVector<const CXXRecordDecl *> ClassVectorTy;
00069   typedef llvm::DenseMap<CharUnits, ClassVectorTy> EmptyClassOffsetsMapTy;
00070   EmptyClassOffsetsMapTy EmptyClassOffsets;
00071   
00072   /// MaxEmptyClassOffset - The highest offset known to contain an empty
00073   /// base subobject.
00074   CharUnits MaxEmptyClassOffset;
00075   
00076   /// ComputeEmptySubobjectSizes - Compute the size of the largest base or
00077   /// member subobject that is empty.
00078   void ComputeEmptySubobjectSizes();
00079   
00080   void AddSubobjectAtOffset(const CXXRecordDecl *RD, CharUnits Offset);
00081   
00082   void UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info,
00083                                  CharUnits Offset, bool PlacingEmptyBase);
00084   
00085   void UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD, 
00086                                   const CXXRecordDecl *Class,
00087                                   CharUnits Offset);
00088   void UpdateEmptyFieldSubobjects(const FieldDecl *FD, CharUnits Offset);
00089   
00090   /// AnyEmptySubobjectsBeyondOffset - Returns whether there are any empty
00091   /// subobjects beyond the given offset.
00092   bool AnyEmptySubobjectsBeyondOffset(CharUnits Offset) const {
00093     return Offset <= MaxEmptyClassOffset;
00094   }
00095 
00096   CharUnits 
00097   getFieldOffset(const ASTRecordLayout &Layout, unsigned FieldNo) const {
00098     uint64_t FieldOffset = Layout.getFieldOffset(FieldNo);
00099     assert(FieldOffset % CharWidth == 0 && 
00100            "Field offset not at char boundary!");
00101 
00102     return Context.toCharUnitsFromBits(FieldOffset);
00103   }
00104 
00105 protected:
00106   bool CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
00107                                  CharUnits Offset) const;
00108 
00109   bool CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
00110                                      CharUnits Offset);
00111 
00112   bool CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD, 
00113                                       const CXXRecordDecl *Class,
00114                                       CharUnits Offset) const;
00115   bool CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
00116                                       CharUnits Offset) const;
00117 
00118 public:
00119   /// This holds the size of the largest empty subobject (either a base
00120   /// or a member). Will be zero if the record being built doesn't contain
00121   /// any empty classes.
00122   CharUnits SizeOfLargestEmptySubobject;
00123 
00124   EmptySubobjectMap(const ASTContext &Context, const CXXRecordDecl *Class)
00125   : Context(Context), CharWidth(Context.getCharWidth()), Class(Class) {
00126       ComputeEmptySubobjectSizes();
00127   }
00128 
00129   /// CanPlaceBaseAtOffset - Return whether the given base class can be placed
00130   /// at the given offset.
00131   /// Returns false if placing the record will result in two components
00132   /// (direct or indirect) of the same type having the same offset.
00133   bool CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
00134                             CharUnits Offset);
00135 
00136   /// CanPlaceFieldAtOffset - Return whether a field can be placed at the given
00137   /// offset.
00138   bool CanPlaceFieldAtOffset(const FieldDecl *FD, CharUnits Offset);
00139 };
00140 
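// [Editorial sketch, not part of the original file] A minimal example of the
// conflict EmptySubobjectMap is designed to catch, assuming a typical
// Itanium C++ ABI target:
//
//   struct Empty { };
//   struct A : Empty { };          // empty, size 1
//   struct B : Empty { A a; };     // base Empty at offset 0
//
// If the member 'a' were placed at offset 0, its Empty base subobject would
// share an offset with B's own Empty base, so the builder pushes 'a' to
// offset 1 and sizeof(B) becomes 2 rather than 1.
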
00141 void EmptySubobjectMap::ComputeEmptySubobjectSizes() {
00142   // Check the bases.
00143   for (const CXXBaseSpecifier &Base : Class->bases()) {
00144     const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
00145 
00146     CharUnits EmptySize;
00147     const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
00148     if (BaseDecl->isEmpty()) {
00149       // If the class decl is empty, get its size.
00150       EmptySize = Layout.getSize();
00151     } else {
00152       // Otherwise, we get the largest empty subobject for the decl.
00153       EmptySize = Layout.getSizeOfLargestEmptySubobject();
00154     }
00155 
00156     if (EmptySize > SizeOfLargestEmptySubobject)
00157       SizeOfLargestEmptySubobject = EmptySize;
00158   }
00159 
00160   // Check the fields.
00161   for (const FieldDecl *FD : Class->fields()) {
00162     const RecordType *RT =
00163         Context.getBaseElementType(FD->getType())->getAs<RecordType>();
00164 
00165     // We only care about record types.
00166     if (!RT)
00167       continue;
00168 
00169     CharUnits EmptySize;
00170     const CXXRecordDecl *MemberDecl = RT->getAsCXXRecordDecl();
00171     const ASTRecordLayout &Layout = Context.getASTRecordLayout(MemberDecl);
00172     if (MemberDecl->isEmpty()) {
00173       // If the class decl is empty, get its size.
00174       EmptySize = Layout.getSize();
00175     } else {
00176       // Otherwise, we get the largest empty subobject for the decl.
00177       EmptySize = Layout.getSizeOfLargestEmptySubobject();
00178     }
00179 
00180     if (EmptySize > SizeOfLargestEmptySubobject)
00181       SizeOfLargestEmptySubobject = EmptySize;
00182   }
00183 }
00184 
00185 bool
00186 EmptySubobjectMap::CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD, 
00187                                              CharUnits Offset) const {
00188   // We only need to check empty bases.
00189   if (!RD->isEmpty())
00190     return true;
00191 
00192   EmptyClassOffsetsMapTy::const_iterator I = EmptyClassOffsets.find(Offset);
00193   if (I == EmptyClassOffsets.end())
00194     return true;
00195 
00196   const ClassVectorTy &Classes = I->second;
00197   if (std::find(Classes.begin(), Classes.end(), RD) == Classes.end())
00198     return true;
00199 
00200   // There is already an empty class of the same type at this offset.
00201   return false;
00202 }
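
// [Editorial note, not part of the original file] Only *same-type* empty
// subobjects conflict. For example, with
//   struct E1 { };  struct E2 { };  struct S : E1, E2 { };
// both empty bases may legally share offset 0, so sizeof(S) stays 1 on
// typical Itanium-ABI targets; the map above would reject a second E1 at
// offset 0, but not an E2.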
00203   
00204 void EmptySubobjectMap::AddSubobjectAtOffset(const CXXRecordDecl *RD, 
00205                                              CharUnits Offset) {
00206   // We only care about empty bases.
00207   if (!RD->isEmpty())
00208     return;
00209 
00210   // If we have empty structures inside a union, we can assign both
00211   // the same offset. Just avoid pushing them twice in the list.
00212   ClassVectorTy &Classes = EmptyClassOffsets[Offset];
00213   if (std::find(Classes.begin(), Classes.end(), RD) != Classes.end())
00214     return;
00215   
00216   Classes.push_back(RD);
00217   
00218   // Update the empty class offset.
00219   if (Offset > MaxEmptyClassOffset)
00220     MaxEmptyClassOffset = Offset;
00221 }
00222 
00223 bool
00224 EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
00225                                                  CharUnits Offset) {
00226   // We don't have to keep looking past the maximum offset that's known to
00227   // contain an empty class.
00228   if (!AnyEmptySubobjectsBeyondOffset(Offset))
00229     return true;
00230 
00231   if (!CanPlaceSubobjectAtOffset(Info->Class, Offset))
00232     return false;
00233 
00234   // Traverse all non-virtual bases.
00235   const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
00236   for (const BaseSubobjectInfo *Base : Info->Bases) {
00237     if (Base->IsVirtual)
00238       continue;
00239 
00240     CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
00241 
00242     if (!CanPlaceBaseSubobjectAtOffset(Base, BaseOffset))
00243       return false;
00244   }
00245 
00246   if (Info->PrimaryVirtualBaseInfo) {
00247     BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
00248 
00249     if (Info == PrimaryVirtualBaseInfo->Derived) {
00250       if (!CanPlaceBaseSubobjectAtOffset(PrimaryVirtualBaseInfo, Offset))
00251         return false;
00252     }
00253   }
00254   
00255   // Traverse all member variables.
00256   unsigned FieldNo = 0;
00257   for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(), 
00258        E = Info->Class->field_end(); I != E; ++I, ++FieldNo) {
00259     if (I->isBitField())
00260       continue;
00261 
00262     CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
00263     if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
00264       return false;
00265   }
00266 
00267   return true;
00268 }
00269 
00270 void EmptySubobjectMap::UpdateEmptyBaseSubobjects(const BaseSubobjectInfo *Info, 
00271                                                   CharUnits Offset,
00272                                                   bool PlacingEmptyBase) {
00273   if (!PlacingEmptyBase && Offset >= SizeOfLargestEmptySubobject) {
00274     // We know that the only empty subobjects that can conflict with empty
00275     // subobjects of non-empty bases are empty bases that can be placed at
00276     // offset zero. Because of this, we only need to keep track of empty base 
00277     // subobjects with offsets less than the size of the largest empty
00278     // subobject for our class.    
00279     return;
00280   }
00281 
00282   AddSubobjectAtOffset(Info->Class, Offset);
00283 
00284   // Traverse all non-virtual bases.
00285   const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
00286   for (const BaseSubobjectInfo *Base : Info->Bases) {
00287     if (Base->IsVirtual)
00288       continue;
00289 
00290     CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
00291     UpdateEmptyBaseSubobjects(Base, BaseOffset, PlacingEmptyBase);
00292   }
00293 
00294   if (Info->PrimaryVirtualBaseInfo) {
00295     BaseSubobjectInfo *PrimaryVirtualBaseInfo = Info->PrimaryVirtualBaseInfo;
00296     
00297     if (Info == PrimaryVirtualBaseInfo->Derived)
00298       UpdateEmptyBaseSubobjects(PrimaryVirtualBaseInfo, Offset,
00299                                 PlacingEmptyBase);
00300   }
00301 
00302   // Traverse all member variables.
00303   unsigned FieldNo = 0;
00304   for (CXXRecordDecl::field_iterator I = Info->Class->field_begin(), 
00305        E = Info->Class->field_end(); I != E; ++I, ++FieldNo) {
00306     if (I->isBitField())
00307       continue;
00308 
00309     CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
00310     UpdateEmptyFieldSubobjects(*I, FieldOffset);
00311   }
00312 }
00313 
00314 bool EmptySubobjectMap::CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
00315                                              CharUnits Offset) {
00316   // If we know this class doesn't have any empty subobjects we don't need to
00317   // bother checking.
00318   if (SizeOfLargestEmptySubobject.isZero())
00319     return true;
00320 
00321   if (!CanPlaceBaseSubobjectAtOffset(Info, Offset))
00322     return false;
00323 
00324   // We are able to place the base at this offset. Make sure to update the
00325   // empty base subobject map.
00326   UpdateEmptyBaseSubobjects(Info, Offset, Info->Class->isEmpty());
00327   return true;
00328 }
00329 
00330 bool
00331 EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD, 
00332                                                   const CXXRecordDecl *Class,
00333                                                   CharUnits Offset) const {
00334   // We don't have to keep looking past the maximum offset that's known to
00335   // contain an empty class.
00336   if (!AnyEmptySubobjectsBeyondOffset(Offset))
00337     return true;
00338 
00339   if (!CanPlaceSubobjectAtOffset(RD, Offset))
00340     return false;
00341   
00342   const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
00343 
00344   // Traverse all non-virtual bases.
00345   for (const CXXBaseSpecifier &Base : RD->bases()) {
00346     if (Base.isVirtual())
00347       continue;
00348 
00349     const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
00350 
00351     CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
00352     if (!CanPlaceFieldSubobjectAtOffset(BaseDecl, Class, BaseOffset))
00353       return false;
00354   }
00355 
00356   if (RD == Class) {
00357     // This is the most derived class, traverse virtual bases as well.
00358     for (const CXXBaseSpecifier &Base : RD->vbases()) {
00359       const CXXRecordDecl *VBaseDecl = Base.getType()->getAsCXXRecordDecl();
00360 
00361       CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
00362       if (!CanPlaceFieldSubobjectAtOffset(VBaseDecl, Class, VBaseOffset))
00363         return false;
00364     }
00365   }
00366     
00367   // Traverse all member variables.
00368   unsigned FieldNo = 0;
00369   for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
00370        I != E; ++I, ++FieldNo) {
00371     if (I->isBitField())
00372       continue;
00373 
00374     CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
00375     
00376     if (!CanPlaceFieldSubobjectAtOffset(*I, FieldOffset))
00377       return false;
00378   }
00379 
00380   return true;
00381 }
00382 
00383 bool
00384 EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
00385                                                   CharUnits Offset) const {
00386   // We don't have to keep looking past the maximum offset that's known to
00387   // contain an empty class.
00388   if (!AnyEmptySubobjectsBeyondOffset(Offset))
00389     return true;
00390   
00391   QualType T = FD->getType();
00392   if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
00393     return CanPlaceFieldSubobjectAtOffset(RD, RD, Offset);
00394 
00395   // If we have an array type we need to look at every element.
00396   if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
00397     QualType ElemTy = Context.getBaseElementType(AT);
00398     const RecordType *RT = ElemTy->getAs<RecordType>();
00399     if (!RT)
00400       return true;
00401 
00402     const CXXRecordDecl *RD = RT->getAsCXXRecordDecl();
00403     const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
00404 
00405     uint64_t NumElements = Context.getConstantArrayElementCount(AT);
00406     CharUnits ElementOffset = Offset;
00407     for (uint64_t I = 0; I != NumElements; ++I) {
00408       // We don't have to keep looking past the maximum offset that's known to
00409       // contain an empty class.
00410       if (!AnyEmptySubobjectsBeyondOffset(ElementOffset))
00411         return true;
00412       
00413       if (!CanPlaceFieldSubobjectAtOffset(RD, RD, ElementOffset))
00414         return false;
00415 
00416       ElementOffset += Layout.getSize();
00417     }
00418   }
00419 
00420   return true;
00421 }
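
// [Editorial sketch, not part of the original file] The per-element walk
// above is what forces layouts like the following, assuming a typical
// Itanium-ABI target:
//   struct E { };
//   struct S : E { E arr[2]; };
// Placing 'arr' at offset 0 would put arr[0] on top of the E base, so the
// field is moved to offset 1; arr[1] then lands at offset 2 and sizeof(S)
// is 3.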
00422 
00423 bool
00424 EmptySubobjectMap::CanPlaceFieldAtOffset(const FieldDecl *FD, 
00425                                          CharUnits Offset) {
00426   if (!CanPlaceFieldSubobjectAtOffset(FD, Offset))
00427     return false;
00428   
00429   // We are able to place the member variable at this offset.
00430   // Make sure to update the empty base subobject map.
00431   UpdateEmptyFieldSubobjects(FD, Offset);
00432   return true;
00433 }
00434 
00435 void EmptySubobjectMap::UpdateEmptyFieldSubobjects(const CXXRecordDecl *RD, 
00436                                                    const CXXRecordDecl *Class,
00437                                                    CharUnits Offset) {
00438   // We know that the only empty subobjects that can conflict with empty
00439   // field subobjects are subobjects of empty bases that can be placed at offset
00440   // zero. Because of this, we only need to keep track of empty field 
00441   // subobjects with offsets less than the size of the largest empty
00442   // subobject for our class.
00443   if (Offset >= SizeOfLargestEmptySubobject)
00444     return;
00445 
00446   AddSubobjectAtOffset(RD, Offset);
00447 
00448   const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
00449 
00450   // Traverse all non-virtual bases.
00451   for (const CXXBaseSpecifier &Base : RD->bases()) {
00452     if (Base.isVirtual())
00453       continue;
00454 
00455     const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
00456 
00457     CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(BaseDecl);
00458     UpdateEmptyFieldSubobjects(BaseDecl, Class, BaseOffset);
00459   }
00460 
00461   if (RD == Class) {
00462     // This is the most derived class, traverse virtual bases as well.
00463     for (const CXXBaseSpecifier &Base : RD->vbases()) {
00464       const CXXRecordDecl *VBaseDecl = Base.getType()->getAsCXXRecordDecl();
00465 
00466       CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBaseDecl);
00467       UpdateEmptyFieldSubobjects(VBaseDecl, Class, VBaseOffset);
00468     }
00469   }
00470   
00471   // Traverse all member variables.
00472   unsigned FieldNo = 0;
00473   for (CXXRecordDecl::field_iterator I = RD->field_begin(), E = RD->field_end();
00474        I != E; ++I, ++FieldNo) {
00475     if (I->isBitField())
00476       continue;
00477 
00478     CharUnits FieldOffset = Offset + getFieldOffset(Layout, FieldNo);
00479 
00480     UpdateEmptyFieldSubobjects(*I, FieldOffset);
00481   }
00482 }
00483   
00484 void EmptySubobjectMap::UpdateEmptyFieldSubobjects(const FieldDecl *FD,
00485                                                    CharUnits Offset) {
00486   QualType T = FD->getType();
00487   if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl()) {
00488     UpdateEmptyFieldSubobjects(RD, RD, Offset);
00489     return;
00490   }
00491 
00492   // If we have an array type we need to update every element.
00493   if (const ConstantArrayType *AT = Context.getAsConstantArrayType(T)) {
00494     QualType ElemTy = Context.getBaseElementType(AT);
00495     const RecordType *RT = ElemTy->getAs<RecordType>();
00496     if (!RT)
00497       return;
00498 
00499     const CXXRecordDecl *RD = RT->getAsCXXRecordDecl();
00500     const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
00501     
00502     uint64_t NumElements = Context.getConstantArrayElementCount(AT);
00503     CharUnits ElementOffset = Offset;
00504     
00505     for (uint64_t I = 0; I != NumElements; ++I) {
00506       // We know that the only empty subobjects that can conflict with empty
00507       // field subobjects are subobjects of empty bases that can be placed at 
00508       // offset zero. Because of this, we only need to keep track of empty field
00509       // subobjects with offsets less than the size of the largest empty
00510       // subobject for our class.
00511       if (ElementOffset >= SizeOfLargestEmptySubobject)
00512         return;
00513 
00514       UpdateEmptyFieldSubobjects(RD, RD, ElementOffset);
00515       ElementOffset += Layout.getSize();
00516     }
00517   }
00518 }
00519 
00520 typedef llvm::SmallPtrSet<const CXXRecordDecl*, 4> ClassSetTy;
00521 
00522 class RecordLayoutBuilder {
00523 protected:
00524   // FIXME: Remove this and make the appropriate fields public.
00525   friend class clang::ASTContext;
00526 
00527   const ASTContext &Context;
00528 
00529   EmptySubobjectMap *EmptySubobjects;
00530 
00531   /// Size - The current size of the record layout.
00532   uint64_t Size;
00533 
00534   /// Alignment - The current alignment of the record layout.
00535   CharUnits Alignment;
00536 
00537   /// \brief The alignment if attribute packed is not used.
00538   CharUnits UnpackedAlignment;
00539 
00540   SmallVector<uint64_t, 16> FieldOffsets;
00541 
00542   /// \brief Whether the external AST source has provided a layout for this
00543   /// record.
00544   unsigned ExternalLayout : 1;
00545 
00546   /// \brief Whether we need to infer alignment, even when we have an 
00547   /// externally-provided layout.
00548   unsigned InferAlignment : 1;
00549   
00550   /// Packed - Whether the record is packed or not.
00551   unsigned Packed : 1;
00552 
00553   unsigned IsUnion : 1;
00554 
00555   unsigned IsMac68kAlign : 1;
00556   
00557   unsigned IsMsStruct : 1;
00558 
00559   /// UnfilledBitsInLastUnit - If the last field laid out was a bitfield,
00560   /// this contains the number of bits in the last unit that can be used for
00561   /// an adjacent bitfield if necessary.  The unit in question is usually
00562   /// a byte, but larger units are used if IsMsStruct.
00563   unsigned char UnfilledBitsInLastUnit;
00564   /// LastBitfieldTypeSize - If IsMsStruct, represents the size of the type
00565   /// of the previous field if it was a bitfield.
00566   unsigned char LastBitfieldTypeSize;
00567 
00568   /// MaxFieldAlignment - The maximum allowed field alignment. This is set by
00569   /// #pragma pack.
00570   CharUnits MaxFieldAlignment;
00571 
00572   /// DataSize - The data size of the record being laid out.
00573   uint64_t DataSize;
00574 
00575   CharUnits NonVirtualSize;
00576   CharUnits NonVirtualAlignment;
00577 
00578   /// PrimaryBase - the primary base class (if one exists) of the class
00579   /// we're laying out.
00580   const CXXRecordDecl *PrimaryBase;
00581 
00582   /// PrimaryBaseIsVirtual - Whether the primary base of the class we're laying
00583   /// out is virtual.
00584   bool PrimaryBaseIsVirtual;
00585 
00586   /// HasOwnVFPtr - Whether the class provides its own vtable/vftbl
00587   /// pointer, as opposed to inheriting one from a primary base class.
00588   bool HasOwnVFPtr;
00589 
00590   typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;
00591 
00592   /// Bases - base classes and their offsets in the record.
00593   BaseOffsetsMapTy Bases;
00594 
00595   // VBases - virtual base classes and their offsets in the record.
00596   ASTRecordLayout::VBaseOffsetsMapTy VBases;
00597 
00598   /// IndirectPrimaryBases - Virtual base classes, direct or indirect, that are
00599   /// primary base classes for some other direct or indirect base class.
00600   CXXIndirectPrimaryBaseSet IndirectPrimaryBases;
00601 
00602   /// FirstNearlyEmptyVBase - The first nearly empty virtual base class in
00603   /// inheritance graph order. Used for determining the primary base class.
00604   const CXXRecordDecl *FirstNearlyEmptyVBase;
00605 
00606   /// VisitedVirtualBases - A set of all the visited virtual bases, used to
00607   /// avoid visiting virtual bases more than once.
00608   llvm::SmallPtrSet<const CXXRecordDecl *, 4> VisitedVirtualBases;
00609 
00610   /// \brief Externally-provided size.
00611   uint64_t ExternalSize;
00612   
00613   /// \brief Externally-provided alignment.
00614   uint64_t ExternalAlign;
00615   
00616   /// \brief Externally-provided field offsets.
00617   llvm::DenseMap<const FieldDecl *, uint64_t> ExternalFieldOffsets;
00618 
00619   /// \brief Externally-provided direct, non-virtual base offsets.
00620   llvm::DenseMap<const CXXRecordDecl *, CharUnits> ExternalBaseOffsets;
00621 
00622   /// \brief Externally-provided virtual base offsets.
00623   llvm::DenseMap<const CXXRecordDecl *, CharUnits> ExternalVirtualBaseOffsets;
00624 
00625   RecordLayoutBuilder(const ASTContext &Context,
00626                       EmptySubobjectMap *EmptySubobjects)
00627     : Context(Context), EmptySubobjects(EmptySubobjects), Size(0), 
00628       Alignment(CharUnits::One()), UnpackedAlignment(CharUnits::One()),
00629       ExternalLayout(false), InferAlignment(false), 
00630       Packed(false), IsUnion(false), IsMac68kAlign(false), IsMsStruct(false),
00631       UnfilledBitsInLastUnit(0), LastBitfieldTypeSize(0),
00632       MaxFieldAlignment(CharUnits::Zero()), 
00633       DataSize(0), NonVirtualSize(CharUnits::Zero()), 
00634       NonVirtualAlignment(CharUnits::One()), 
00635       PrimaryBase(nullptr), PrimaryBaseIsVirtual(false),
00636       HasOwnVFPtr(false),
00637       FirstNearlyEmptyVBase(nullptr) {}
00638 
00639   void Layout(const RecordDecl *D);
00640   void Layout(const CXXRecordDecl *D);
00641   void Layout(const ObjCInterfaceDecl *D);
00642 
00643   void LayoutFields(const RecordDecl *D);
00644   void LayoutField(const FieldDecl *D, bool InsertExtraPadding);
00645   void LayoutWideBitField(uint64_t FieldSize, uint64_t TypeSize,
00646                           bool FieldPacked, const FieldDecl *D);
00647   void LayoutBitField(const FieldDecl *D);
00648 
00649   TargetCXXABI getCXXABI() const {
00650     return Context.getTargetInfo().getCXXABI();
00651   }
00652 
00653   /// BaseSubobjectInfoAllocator - Allocator for BaseSubobjectInfo objects.
00654   llvm::SpecificBumpPtrAllocator<BaseSubobjectInfo> BaseSubobjectInfoAllocator;
00655   
00656   typedef llvm::DenseMap<const CXXRecordDecl *, BaseSubobjectInfo *>
00657     BaseSubobjectInfoMapTy;
00658 
00659   /// VirtualBaseInfo - Map from all the (direct or indirect) virtual bases
00660   /// of the class we're laying out to their base subobject info.
00661   BaseSubobjectInfoMapTy VirtualBaseInfo;
00662   
00663   /// NonVirtualBaseInfo - Map from all the direct non-virtual bases of the
00664   /// class we're laying out to their base subobject info.
00665   BaseSubobjectInfoMapTy NonVirtualBaseInfo;
00666 
00667   /// ComputeBaseSubobjectInfo - Compute the base subobject information for the
00668   /// bases of the given class.
00669   void ComputeBaseSubobjectInfo(const CXXRecordDecl *RD);
00670 
00671   /// ComputeBaseSubobjectInfo - Compute the base subobject information for a
00672   /// single class and all of its base classes.
00673   BaseSubobjectInfo *ComputeBaseSubobjectInfo(const CXXRecordDecl *RD, 
00674                                               bool IsVirtual,
00675                                               BaseSubobjectInfo *Derived);
00676 
00677   /// DeterminePrimaryBase - Determine the primary base of the given class.
00678   void DeterminePrimaryBase(const CXXRecordDecl *RD);
00679 
00680   void SelectPrimaryVBase(const CXXRecordDecl *RD);
00681 
00682   void EnsureVTablePointerAlignment(CharUnits UnpackedBaseAlign);
00683 
00684   /// LayoutNonVirtualBases - Determines the primary base class (if any) and
00685   /// lays it out. Will then proceed to lay out all non-virtual base classes.
00686   void LayoutNonVirtualBases(const CXXRecordDecl *RD);
00687 
00688   /// LayoutNonVirtualBase - Lays out a single non-virtual base.
00689   void LayoutNonVirtualBase(const BaseSubobjectInfo *Base);
00690 
00691   void AddPrimaryVirtualBaseOffsets(const BaseSubobjectInfo *Info,
00692                                     CharUnits Offset);
00693 
00694   /// LayoutVirtualBases - Lays out all the virtual bases.
00695   void LayoutVirtualBases(const CXXRecordDecl *RD,
00696                           const CXXRecordDecl *MostDerivedClass);
00697 
00698   /// LayoutVirtualBase - Lays out a single virtual base.
00699   void LayoutVirtualBase(const BaseSubobjectInfo *Base);
00700 
00701   /// LayoutBase - Will lay out a base and return the offset where it was
00702   /// placed, in chars.
00703   CharUnits LayoutBase(const BaseSubobjectInfo *Base);
00704 
00705   /// InitializeLayout - Initialize record layout for the given record decl.
00706   void InitializeLayout(const Decl *D);
00707 
00708   /// FinishLayout - Finalize record layout. Adjust record size based on the
00709   /// alignment.
00710   void FinishLayout(const NamedDecl *D);
00711 
00712   void UpdateAlignment(CharUnits NewAlignment, CharUnits UnpackedNewAlignment);
00713   void UpdateAlignment(CharUnits NewAlignment) {
00714     UpdateAlignment(NewAlignment, NewAlignment);
00715   }
00716 
00717   /// \brief Retrieve the externally-supplied field offset for the given
00718   /// field.
00719   ///
00720   /// \param Field The field whose offset is being queried.
00721   /// \param ComputedOffset The offset that we've computed for this field.
00722   uint64_t updateExternalFieldOffset(const FieldDecl *Field, 
00723                                      uint64_t ComputedOffset);
00724   
00725   void CheckFieldPadding(uint64_t Offset, uint64_t UnpaddedOffset,
00726                           uint64_t UnpackedOffset, unsigned UnpackedAlign,
00727                           bool isPacked, const FieldDecl *D);
00728 
00729   DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID);
00730 
00731   CharUnits getSize() const { 
00732     assert(Size % Context.getCharWidth() == 0);
00733     return Context.toCharUnitsFromBits(Size); 
00734   }
00735   uint64_t getSizeInBits() const { return Size; }
00736 
00737   void setSize(CharUnits NewSize) { Size = Context.toBits(NewSize); }
00738   void setSize(uint64_t NewSize) { Size = NewSize; }
00739 
00740   CharUnits getAligment() const { return Alignment; }
00741 
00742   CharUnits getDataSize() const { 
00743     assert(DataSize % Context.getCharWidth() == 0);
00744     return Context.toCharUnitsFromBits(DataSize); 
00745   }
00746   uint64_t getDataSizeInBits() const { return DataSize; }
00747 
00748   void setDataSize(CharUnits NewSize) { DataSize = Context.toBits(NewSize); }
00749   void setDataSize(uint64_t NewSize) { DataSize = NewSize; }
00750 
00751   RecordLayoutBuilder(const RecordLayoutBuilder &) LLVM_DELETED_FUNCTION;
00752   void operator=(const RecordLayoutBuilder &) LLVM_DELETED_FUNCTION;
00753 };
00754 } // end anonymous namespace
00755 
00756 void
00757 RecordLayoutBuilder::SelectPrimaryVBase(const CXXRecordDecl *RD) {
00758   for (const auto &I : RD->bases()) {
00759     assert(!I.getType()->isDependentType() &&
00760            "Cannot layout class with dependent bases.");
00761 
00762     const CXXRecordDecl *Base = I.getType()->getAsCXXRecordDecl();
00763 
00764     // Check if this is a nearly empty virtual base.
00765     if (I.isVirtual() && Context.isNearlyEmpty(Base)) {
00766       // If it's not an indirect primary base, then we've found our primary
00767       // base.
00768       if (!IndirectPrimaryBases.count(Base)) {
00769         PrimaryBase = Base;
00770         PrimaryBaseIsVirtual = true;
00771         return;
00772       }
00773 
00774       // Is this the first nearly empty virtual base?
00775       if (!FirstNearlyEmptyVBase)
00776         FirstNearlyEmptyVBase = Base;
00777     }
00778 
00779     SelectPrimaryVBase(Base);
00780     if (PrimaryBase)
00781       return;
00782   }
00783 }
00784 
00785 /// DeterminePrimaryBase - Determine the primary base of the given class.
00786 void RecordLayoutBuilder::DeterminePrimaryBase(const CXXRecordDecl *RD) {
00787   // If the class isn't dynamic, it won't have a primary base.
00788   if (!RD->isDynamicClass())
00789     return;
00790 
00791   // Compute all the primary virtual bases for all of our direct and
00792   // indirect bases, and record all their primary virtual base classes.
00793   RD->getIndirectPrimaryBases(IndirectPrimaryBases);
00794 
00795   // If the record has a dynamic base class, attempt to choose a primary base
00796   // class. It is the first (in direct base class order) non-virtual dynamic
00797   // base class, if one exists.
00798   for (const auto &I : RD->bases()) {
00799     // Ignore virtual bases.
00800     if (I.isVirtual())
00801       continue;
00802 
00803     const CXXRecordDecl *Base = I.getType()->getAsCXXRecordDecl();
00804 
00805     if (Base->isDynamicClass()) {
00806       // We found it.
00807       PrimaryBase = Base;
00808       PrimaryBaseIsVirtual = false;
00809       return;
00810     }
00811   }
00812 
00813   // Under the Itanium ABI, if there is no non-virtual primary base class,
00814   // try to compute the primary virtual base.  The primary virtual base is
00815   // the first nearly empty virtual base that is not an indirect primary
00816   // virtual base class, if one exists.
00817   if (RD->getNumVBases() != 0) {
00818     SelectPrimaryVBase(RD);
00819     if (PrimaryBase)
00820       return;
00821   }
00822 
00823   // Otherwise, it is the first indirect primary base class, if one exists.
00824   if (FirstNearlyEmptyVBase) {
00825     PrimaryBase = FirstNearlyEmptyVBase;
00826     PrimaryBaseIsVirtual = true;
00827     return;
00828   }
00829 
00830   assert(!PrimaryBase && "Should not get here with a primary base!");
00831 }
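
// [Editorial example, not part of the original file] Primary-base selection
// in the common case, assuming the Itanium C++ ABI:
//   struct A { virtual void f(); };
//   struct B : A { virtual void g(); };
// A is dynamic and non-virtual, so it becomes B's primary base; B reuses A's
// vtable pointer and no additional vptr storage is allocated for B.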
00832 
00833 BaseSubobjectInfo *
00834 RecordLayoutBuilder::ComputeBaseSubobjectInfo(const CXXRecordDecl *RD, 
00835                                               bool IsVirtual,
00836                                               BaseSubobjectInfo *Derived) {
00837   BaseSubobjectInfo *Info;
00838   
00839   if (IsVirtual) {
00840     // Check if we already have info about this virtual base.
00841     BaseSubobjectInfo *&InfoSlot = VirtualBaseInfo[RD];
00842     if (InfoSlot) {
00843       assert(InfoSlot->Class == RD && "Wrong class for virtual base info!");
00844       return InfoSlot;
00845     }
00846 
00847     // We don't, create it.
00848     InfoSlot = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
00849     Info = InfoSlot;
00850   } else {
00851     Info = new (BaseSubobjectInfoAllocator.Allocate()) BaseSubobjectInfo;
00852   }
00853   
00854   Info->Class = RD;
00855   Info->IsVirtual = IsVirtual;
00856   Info->Derived = nullptr;
00857   Info->PrimaryVirtualBaseInfo = nullptr;
00858 
00859   const CXXRecordDecl *PrimaryVirtualBase = nullptr;
00860   BaseSubobjectInfo *PrimaryVirtualBaseInfo = nullptr;
00861 
00862   // Check if this base has a primary virtual base.
00863   if (RD->getNumVBases()) {
00864     const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
00865     if (Layout.isPrimaryBaseVirtual()) {
00866       // This base does have a primary virtual base.
00867       PrimaryVirtualBase = Layout.getPrimaryBase();
00868       assert(PrimaryVirtualBase && "Didn't have a primary virtual base!");
00869       
00870       // Now check if we have base subobject info about this primary base.
00871       PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
00872       
00873       if (PrimaryVirtualBaseInfo) {
00874         if (PrimaryVirtualBaseInfo->Derived) {
00875           // We did have info about this primary base, and it turns out that it
00876           // has already been claimed as a primary virtual base for another
00877           // base.
00878           PrimaryVirtualBase = nullptr;
00879         } else {
00880           // We can claim this base as our primary base.
00881           Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
00882           PrimaryVirtualBaseInfo->Derived = Info;
00883         }
00884       }
00885     }
00886   }
00887 
00888   // Now go through all direct bases.
00889   for (const auto &I : RD->bases()) {
00890     bool IsVirtual = I.isVirtual();
00891 
00892     const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
00893 
00894     Info->Bases.push_back(ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, Info));
00895   }
00896   
00897   if (PrimaryVirtualBase && !PrimaryVirtualBaseInfo) {
00898     // Traversing the bases must have created the base info for our primary
00899     // virtual base.
00900     PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
00901     assert(PrimaryVirtualBaseInfo &&
00902            "Did not create a primary virtual base!");
00903       
00904     // Claim the primary virtual base as our primary virtual base.
00905     Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
00906     PrimaryVirtualBaseInfo->Derived = Info;
00907   }
00908   
00909   return Info;
00910 }
00911 
00912 void RecordLayoutBuilder::ComputeBaseSubobjectInfo(const CXXRecordDecl *RD) {
00913   for (const auto &I : RD->bases()) {
00914     bool IsVirtual = I.isVirtual();
00915 
00916     const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
00917 
00918     // Compute the base subobject info for this base.
00919     BaseSubobjectInfo *Info = ComputeBaseSubobjectInfo(BaseDecl, IsVirtual,
00920                                                        nullptr);
00921 
00922     if (IsVirtual) {
00923       // ComputeBaseInfo has already added this base for us.
00924       assert(VirtualBaseInfo.count(BaseDecl) &&
00925              "Did not add virtual base!");
00926     } else {
00927       // Add the base info to the map of non-virtual bases.
00928       assert(!NonVirtualBaseInfo.count(BaseDecl) &&
00929              "Non-virtual base already exists!");
00930       NonVirtualBaseInfo.insert(std::make_pair(BaseDecl, Info));
00931     }
00932   }
00933 }
00934 
00935 void
00936 RecordLayoutBuilder::EnsureVTablePointerAlignment(CharUnits UnpackedBaseAlign) {
00937   CharUnits BaseAlign = (Packed) ? CharUnits::One() : UnpackedBaseAlign;
00938 
00939   // The maximum field alignment overrides base align.
00940   if (!MaxFieldAlignment.isZero()) {
00941     BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
00942     UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
00943   }
00944 
00945   // Round up the current record size to pointer alignment.
00946   setSize(getSize().RoundUpToAlignment(BaseAlign));
00947   setDataSize(getSize());
00948 
00949   // Update the alignment.
00950   UpdateAlignment(BaseAlign, UnpackedBaseAlign);
00951 }
00952 
00953 void
00954 RecordLayoutBuilder::LayoutNonVirtualBases(const CXXRecordDecl *RD) {
00955   // Then, determine the primary base class.
00956   DeterminePrimaryBase(RD);
00957 
00958   // Compute base subobject info.
00959   ComputeBaseSubobjectInfo(RD);
00960   
00961   // If we have a primary base class, lay it out.
00962   if (PrimaryBase) {
00963     if (PrimaryBaseIsVirtual) {
00964       // If the primary virtual base was a primary virtual base of some other
00965       // base class we'll have to steal it.
00966       BaseSubobjectInfo *PrimaryBaseInfo = VirtualBaseInfo.lookup(PrimaryBase);
00967       PrimaryBaseInfo->Derived = nullptr;
00968 
00969       // We have a virtual primary base, insert it as an indirect primary base.
00970       IndirectPrimaryBases.insert(PrimaryBase);
00971 
00972       assert(!VisitedVirtualBases.count(PrimaryBase) &&
00973              "vbase already visited!");
00974       VisitedVirtualBases.insert(PrimaryBase);
00975 
00976       LayoutVirtualBase(PrimaryBaseInfo);
00977     } else {
00978       BaseSubobjectInfo *PrimaryBaseInfo = 
00979         NonVirtualBaseInfo.lookup(PrimaryBase);
00980       assert(PrimaryBaseInfo && 
00981              "Did not find base info for non-virtual primary base!");
00982 
00983       LayoutNonVirtualBase(PrimaryBaseInfo);
00984     }
00985 
00986   // If this class needs a vtable/vf-table and didn't get one from a
00987   // primary base, add it in now.
00988   } else if (RD->isDynamicClass()) {
00989     assert(DataSize == 0 && "Vtable pointer must be at offset zero!");
00990     CharUnits PtrWidth = 
00991       Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
00992     CharUnits PtrAlign = 
00993       Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(0));
00994     EnsureVTablePointerAlignment(PtrAlign);
00995     HasOwnVFPtr = true;
00996     setSize(getSize() + PtrWidth);
00997     setDataSize(getSize());
00998   }
00999 
01000   // Now lay out the non-virtual bases.
01001   for (const auto &I : RD->bases()) {
01002 
01003     // Ignore virtual bases.
01004     if (I.isVirtual())
01005       continue;
01006 
01007     const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
01008 
01009     // Skip the primary base, because we've already laid it out.  The
01010     // !PrimaryBaseIsVirtual check is required because we might have a
01011     // non-virtual base of the same type as a primary virtual base.
01012     if (BaseDecl == PrimaryBase && !PrimaryBaseIsVirtual)
01013       continue;
01014 
01015     // Lay out the base.
01016     BaseSubobjectInfo *BaseInfo = NonVirtualBaseInfo.lookup(BaseDecl);
01017     assert(BaseInfo && "Did not find base info for non-virtual base!");
01018 
01019     LayoutNonVirtualBase(BaseInfo);
01020   }
01021 }
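
// [Editorial example, not part of the original file] The "add a vf-table
// pointer" branch above is what gives a dynamic class with no dynamic bases
// its vptr at offset 0. For instance, on a typical 64-bit Itanium-ABI target:
//   struct X { virtual ~X(); int i; };
// the vptr occupies bytes 0-7, 'i' is laid out at offset 8, and sizeof(X)
// is 16 after tail padding to the 8-byte alignment.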
01022 
01023 void RecordLayoutBuilder::LayoutNonVirtualBase(const BaseSubobjectInfo *Base) {
01024   // Lay out the base.
01025   CharUnits Offset = LayoutBase(Base);
01026 
01027   // Add its base class offset.
01028   assert(!Bases.count(Base->Class) && "base offset already exists!");
01029   Bases.insert(std::make_pair(Base->Class, Offset));
01030 
01031   AddPrimaryVirtualBaseOffsets(Base, Offset);
01032 }
01033 
01034 void
01035 RecordLayoutBuilder::AddPrimaryVirtualBaseOffsets(const BaseSubobjectInfo *Info, 
01036                                                   CharUnits Offset) {
01037   // This base isn't interesting, it has no virtual bases.
01038   if (!Info->Class->getNumVBases())
01039     return;
01040   
01041   // First, check if we have a virtual primary base to add offsets for.
01042   if (Info->PrimaryVirtualBaseInfo) {
01043     assert(Info->PrimaryVirtualBaseInfo->IsVirtual && 
01044            "Primary virtual base is not virtual!");
01045     if (Info->PrimaryVirtualBaseInfo->Derived == Info) {
01046       // Add the offset.
01047       assert(!VBases.count(Info->PrimaryVirtualBaseInfo->Class) && 
01048              "primary vbase offset already exists!");
01049       VBases.insert(std::make_pair(Info->PrimaryVirtualBaseInfo->Class,
01050                                    ASTRecordLayout::VBaseInfo(Offset, false)));
01051 
01052       // Traverse the primary virtual base.
01053       AddPrimaryVirtualBaseOffsets(Info->PrimaryVirtualBaseInfo, Offset);
01054     }
01055   }
01056 
01057   // Now go through all direct non-virtual bases.
01058   const ASTRecordLayout &Layout = Context.getASTRecordLayout(Info->Class);
01059   for (const BaseSubobjectInfo *Base : Info->Bases) {
01060     if (Base->IsVirtual)
01061       continue;
01062 
01063     CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base->Class);
01064     AddPrimaryVirtualBaseOffsets(Base, BaseOffset);
01065   }
01066 }
01067 
01068 void
01069 RecordLayoutBuilder::LayoutVirtualBases(const CXXRecordDecl *RD,
01070                                         const CXXRecordDecl *MostDerivedClass) {
01071   const CXXRecordDecl *PrimaryBase;
01072   bool PrimaryBaseIsVirtual;
01073 
01074   if (MostDerivedClass == RD) {
01075     PrimaryBase = this->PrimaryBase;
01076     PrimaryBaseIsVirtual = this->PrimaryBaseIsVirtual;
01077   } else {
01078     const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);
01079     PrimaryBase = Layout.getPrimaryBase();
01080     PrimaryBaseIsVirtual = Layout.isPrimaryBaseVirtual();
01081   }
01082 
01083   for (const CXXBaseSpecifier &Base : RD->bases()) {
01084     assert(!Base.getType()->isDependentType() &&
01085            "Cannot layout class with dependent bases.");
01086 
01087     const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
01088 
01089     if (Base.isVirtual()) {
01090       if (PrimaryBase != BaseDecl || !PrimaryBaseIsVirtual) {
01091         bool IndirectPrimaryBase = IndirectPrimaryBases.count(BaseDecl);
01092 
01093         // Only lay out the virtual base if it's not an indirect primary base.
01094         if (!IndirectPrimaryBase) {
01095           // Only visit virtual bases once.
01096           if (!VisitedVirtualBases.insert(BaseDecl))
01097             continue;
01098 
01099           const BaseSubobjectInfo *BaseInfo = VirtualBaseInfo.lookup(BaseDecl);
01100           assert(BaseInfo && "Did not find virtual base info!");
01101           LayoutVirtualBase(BaseInfo);
01102         }
01103       }
01104     }
01105 
01106     if (!BaseDecl->getNumVBases()) {
01107       // This base isn't interesting since it doesn't have any virtual bases.
01108       continue;
01109     }
01110 
01111     LayoutVirtualBases(BaseDecl, MostDerivedClass);
01112   }
01113 }
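
// [Editorial example, not part of the original file] In a classic diamond,
//   struct V { int v; };
//   struct A : virtual V { };
//   struct B : virtual V { };
//   struct C : A, B { };
// the recursion above reaches V through both A and B, but the
// VisitedVirtualBases guard ensures it is laid out exactly once, so C
// contains a single V subobject placed after the non-virtual parts of A
// and B.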
01114 
01115 void RecordLayoutBuilder::LayoutVirtualBase(const BaseSubobjectInfo *Base) {
01116   assert(!Base->Derived && "Trying to lay out a primary virtual base!");
01117   
01118   // Lay out the base.
01119   CharUnits Offset = LayoutBase(Base);
01120 
01121   // Add its base class offset.
01122   assert(!VBases.count(Base->Class) && "vbase offset already exists!");
01123   VBases.insert(std::make_pair(Base->Class, 
01124                        ASTRecordLayout::VBaseInfo(Offset, false)));
01125 
01126   AddPrimaryVirtualBaseOffsets(Base, Offset);
01127 }
01128 
01129 CharUnits RecordLayoutBuilder::LayoutBase(const BaseSubobjectInfo *Base) {
01130   const ASTRecordLayout &Layout = Context.getASTRecordLayout(Base->Class);
01131 
01132   
01133   CharUnits Offset;
01134   
01135   // Query the external layout to see if it provides an offset.
01136   bool HasExternalLayout = false;
01137   if (ExternalLayout) {
01138     llvm::DenseMap<const CXXRecordDecl *, CharUnits>::iterator Known;
01139     if (Base->IsVirtual) {
01140       Known = ExternalVirtualBaseOffsets.find(Base->Class);
01141       if (Known != ExternalVirtualBaseOffsets.end()) {
01142         Offset = Known->second;
01143         HasExternalLayout = true;
01144       }
01145     } else {
01146       Known = ExternalBaseOffsets.find(Base->Class);
01147       if (Known != ExternalBaseOffsets.end()) {
01148         Offset = Known->second;
01149         HasExternalLayout = true;
01150       }
01151     }
01152   }
01153   
01154   CharUnits UnpackedBaseAlign = Layout.getNonVirtualAlignment();
01155   CharUnits BaseAlign = (Packed) ? CharUnits::One() : UnpackedBaseAlign;
01156  
01157   // If we have an empty base class, try to place it at offset 0.
01158   if (Base->Class->isEmpty() &&
01159       (!HasExternalLayout || Offset == CharUnits::Zero()) &&
01160       EmptySubobjects->CanPlaceBaseAtOffset(Base, CharUnits::Zero())) {
01161     setSize(std::max(getSize(), Layout.getSize()));
01162     UpdateAlignment(BaseAlign, UnpackedBaseAlign);
01163 
01164     return CharUnits::Zero();
01165   }
01166 
01167   // The maximum field alignment overrides base align.
01168   if (!MaxFieldAlignment.isZero()) {
01169     BaseAlign = std::min(BaseAlign, MaxFieldAlignment);
01170     UnpackedBaseAlign = std::min(UnpackedBaseAlign, MaxFieldAlignment);
01171   }
01172 
01173   if (!HasExternalLayout) {
01174     // Round up the current record size to the base's alignment boundary.
01175     Offset = getDataSize().RoundUpToAlignment(BaseAlign);
01176 
01177     // Try to place the base.
01178     while (!EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset))
01179       Offset += BaseAlign;
01180   } else {
01181     bool Allowed = EmptySubobjects->CanPlaceBaseAtOffset(Base, Offset);
01182     (void)Allowed;
01183     assert(Allowed && "Base subobject externally placed at overlapping offset");
01184 
01185     if (InferAlignment && Offset < getDataSize().RoundUpToAlignment(BaseAlign)){
01186       // The externally-supplied base offset is before the base offset we
01187       // computed. Assume that the structure is packed.
01188       Alignment = CharUnits::One();
01189       InferAlignment = false;
01190     }
01191   }
01192   
01193   if (!Base->Class->isEmpty()) {
01194     // Update the data size.
01195     setDataSize(Offset + Layout.getNonVirtualSize());
01196 
01197     setSize(std::max(getSize(), getDataSize()));
01198   } else
01199     setSize(std::max(getSize(), Offset + Layout.getSize()));
01200 
01201   // Remember max struct/class alignment.
01202   UpdateAlignment(BaseAlign, UnpackedBaseAlign);
01203 
01204   return Offset;
01205 }
01206 
01207 void RecordLayoutBuilder::InitializeLayout(const Decl *D) {
01208   if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
01209     IsUnion = RD->isUnion();
01210     IsMsStruct = RD->isMsStruct(Context);
01211   }
01212 
01213   Packed = D->hasAttr<PackedAttr>();  
01214 
01215   // Honor the default struct packing maximum alignment flag.
01216   if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct) {
01217     MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
01218   }
01219 
01220   // mac68k alignment supersedes maximum field alignment and attribute aligned,
01221   // and forces all structures to have 2-byte alignment. The IBM docs on it
01222   // allude to additional (more complicated) semantics, especially with regard
01223   // to bit-fields, but gcc appears not to follow that.
01224   if (D->hasAttr<AlignMac68kAttr>()) {
01225     IsMac68kAlign = true;
01226     MaxFieldAlignment = CharUnits::fromQuantity(2);
01227     Alignment = CharUnits::fromQuantity(2);
01228   } else {
01229     if (const MaxFieldAlignmentAttr *MFAA = D->getAttr<MaxFieldAlignmentAttr>())
01230       MaxFieldAlignment = Context.toCharUnitsFromBits(MFAA->getAlignment());
01231 
01232     if (unsigned MaxAlign = D->getMaxAlignment())
01233       UpdateAlignment(Context.toCharUnitsFromBits(MaxAlign));
01234   }
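
  // [Editorial example, not part of the original file] Both alignment caps
  // above behave the same way from the layout's point of view. For instance,
  // with '#pragma pack(2)' (a MaxFieldAlignmentAttr) or with mac68k
  // alignment in effect, struct { char c; double d; } places 'd' at offset 2
  // instead of 8, because every field's alignment is clamped to 2 bytes.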
01235   
01236   // If there is an external AST source, ask it for the various offsets.
01237   if (const RecordDecl *RD = dyn_cast<RecordDecl>(D))
01238     if (ExternalASTSource *External = Context.getExternalSource()) {
01239       ExternalLayout = External->layoutRecordType(RD, 
01240                                                   ExternalSize,
01241                                                   ExternalAlign,
01242                                                   ExternalFieldOffsets,
01243                                                   ExternalBaseOffsets,
01244                                                   ExternalVirtualBaseOffsets);
01245       
01246       // Update based on external alignment.
01247       if (ExternalLayout) {
01248         if (ExternalAlign > 0) {
01249           Alignment = Context.toCharUnitsFromBits(ExternalAlign);
01250         } else {
01251           // The external source didn't have alignment information; infer it.
01252           InferAlignment = true;
01253         }
01254       }
01255     }
01256 }
01257 
01258 void RecordLayoutBuilder::Layout(const RecordDecl *D) {
01259   InitializeLayout(D);
01260   LayoutFields(D);
01261 
01262   // Finally, round the size of the total struct up to the alignment of the
01263   // struct itself.
01264   FinishLayout(D);
01265 }
01266 
01267 void RecordLayoutBuilder::Layout(const CXXRecordDecl *RD) {
01268   InitializeLayout(RD);
01269 
01270   // Lay out the vtable and the non-virtual bases.
01271   LayoutNonVirtualBases(RD);
01272 
01273   LayoutFields(RD);
01274 
01275   NonVirtualSize = Context.toCharUnitsFromBits(
01276         llvm::RoundUpToAlignment(getSizeInBits(), 
01277                                  Context.getTargetInfo().getCharAlign()));
01278   NonVirtualAlignment = Alignment;
01279 
01280   // Lay out the virtual bases and add the primary virtual base offsets.
01281   LayoutVirtualBases(RD, RD);
01282 
01283   // Finally, round the size of the total struct up to the alignment
01284   // of the struct itself.
01285   FinishLayout(RD);
01286 
01287 #ifndef NDEBUG
01288   // Check that we have base offsets for all bases.
01289   for (const CXXBaseSpecifier &Base : RD->bases()) {
01290     if (Base.isVirtual())
01291       continue;
01292 
01293     const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
01294 
01295     assert(Bases.count(BaseDecl) && "Did not find base offset!");
01296   }
01297 
01298   // And all virtual bases.
01299   for (const CXXBaseSpecifier &Base : RD->vbases()) {
01300     const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
01301 
01302     assert(VBases.count(BaseDecl) && "Did not find base offset!");
01303   }
01304 #endif
01305 }
01306 
01307 void RecordLayoutBuilder::Layout(const ObjCInterfaceDecl *D) {
01308   if (ObjCInterfaceDecl *SD = D->getSuperClass()) {
01309     const ASTRecordLayout &SL = Context.getASTObjCInterfaceLayout(SD);
01310 
01311     UpdateAlignment(SL.getAlignment());
01312 
01313     // We start laying out ivars not at the end of the superclass
01314     // structure, but at the next byte following the last field.
01315     setSize(SL.getDataSize());
01316     setDataSize(getSize());
01317   }
01318 
01319   InitializeLayout(D);
01320   // Lay out each ivar sequentially.
01321   for (const ObjCIvarDecl *IVD = D->all_declared_ivar_begin(); IVD;
01322        IVD = IVD->getNextIvar())
01323     LayoutField(IVD, false);
01324 
01325   // Finally, round the size of the total struct up to the alignment of the
01326   // struct itself.
01327   FinishLayout(D);
01328 }
01329 
01330 void RecordLayoutBuilder::LayoutFields(const RecordDecl *D) {
01331   // Lay out each field, for now, just sequentially, respecting alignment.  In
01332   // the future, this will need to be tweakable by targets.
01333   bool InsertExtraPadding = D->mayInsertExtraPadding(/*EmitRemark=*/true);
01334   bool HasFlexibleArrayMember = D->hasFlexibleArrayMember();
01335   for (auto I = D->field_begin(), End = D->field_end(); I != End; ++I) {
01336     auto Next(I);
01337     ++Next;
01338     LayoutField(*I,
01339                 InsertExtraPadding && (Next != End || !HasFlexibleArrayMember));
01340   }
01341 }
01342 
01343 // Rounds the specified size to have it a multiple of the char size.
01344 static uint64_t
01345 roundUpSizeToCharAlignment(uint64_t Size,
01346                            const ASTContext &Context) {
01347   uint64_t CharAlignment = Context.getTargetInfo().getCharAlign();
01348   return llvm::RoundUpToAlignment(Size, CharAlignment);
01349 }
01350 
01351 void RecordLayoutBuilder::LayoutWideBitField(uint64_t FieldSize,
01352                                              uint64_t TypeSize,
01353                                              bool FieldPacked,
01354                                              const FieldDecl *D) {
01355   assert(Context.getLangOpts().CPlusPlus &&
01356          "Can only have wide bit-fields in C++!");
01357 
01358   // Itanium C++ ABI 2.4:
01359   //   If sizeof(T)*8 < n, let T' be the largest integral POD type with
01360   //   sizeof(T')*8 <= n.
01361 
01362   QualType IntegralPODTypes[] = {
01363     Context.UnsignedCharTy, Context.UnsignedShortTy, Context.UnsignedIntTy,
01364     Context.UnsignedLongTy, Context.UnsignedLongLongTy
01365   };
01366 
01367   QualType Type;
01368   for (const QualType &QT : IntegralPODTypes) {
01369     uint64_t Size = Context.getTypeSize(QT);
01370 
01371     if (Size > FieldSize)
01372       break;
01373 
01374     Type = QT;
01375   }
01376   assert(!Type.isNull() && "Did not find a type!");
01377 
01378   CharUnits TypeAlign = Context.getTypeAlignInChars(Type);
01379 
01380   // We're not going to use any of the unfilled bits in the last byte.
01381   UnfilledBitsInLastUnit = 0;
01382   LastBitfieldTypeSize = 0;
01383 
01384   uint64_t FieldOffset;
01385   uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;
01386 
01387   if (IsUnion) {
01388     uint64_t RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize,
01389                                                            Context);
01390     setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize));
01391     FieldOffset = 0;
01392   } else {
01393     // The bitfield is allocated starting at the next offset aligned 
01394     // appropriately for T', with length n bits.
01395     FieldOffset = llvm::RoundUpToAlignment(getDataSizeInBits(), 
01396                                            Context.toBits(TypeAlign));
01397 
01398     uint64_t NewSizeInBits = FieldOffset + FieldSize;
01399 
01400     setDataSize(llvm::RoundUpToAlignment(NewSizeInBits, 
01401                                          Context.getTargetInfo().getCharAlign()));
01402     UnfilledBitsInLastUnit = getDataSizeInBits() - NewSizeInBits;
01403   }
01404 
01405   // Place this field at the current location.
01406   FieldOffsets.push_back(FieldOffset);
01407 
01408   CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, FieldOffset,
01409                     Context.toBits(TypeAlign), FieldPacked, D);
01410 
01411   // Update the size.
01412   setSize(std::max(getSizeInBits(), getDataSizeInBits()));
01413 
01414   // Remember max struct/class alignment.
01415   UpdateAlignment(TypeAlign);
01416 }
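// A worked example of the wide bit-field rule (a sketch, assuming a typical
// Itanium-ABI target where char is 8 bits and unsigned short is 16 bits):
//
//   struct S { char c : 16; };   // only valid in C++
//
// Here sizeof(char)*8 < 16, so T' is 'unsigned short', the largest integral
// POD type with sizeof(T')*8 <= 16.  The bit-field is placed at bit offset 0,
// aligned for unsigned short, giving sizeof(S) == 2 and alignof(S) == 2.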
01417 
01418 void RecordLayoutBuilder::LayoutBitField(const FieldDecl *D) {
01419   bool FieldPacked = Packed || D->hasAttr<PackedAttr>();
01420   uint64_t FieldSize = D->getBitWidthValue(Context);
01421   TypeInfo FieldInfo = Context.getTypeInfo(D->getType());
01422   uint64_t TypeSize = FieldInfo.Width;
01423   unsigned FieldAlign = FieldInfo.Align;
01424 
01425   // UnfilledBitsInLastUnit is the difference between the end of the
01426   // last allocated bitfield (i.e. the first bit offset available for
01427   // bitfields) and the end of the current data size in bits (i.e. the
01428   // first bit offset available for non-bitfields).  The current data
01429   // size in bits is always a multiple of the char size; additionally,
01430   // for ms_struct records it's also a multiple of the
01431   // LastBitfieldTypeSize (if set).
01432 
01433   // The struct-layout algorithm is dictated by the platform ABI,
01434   // which in principle could use almost any rules it likes.  In
01435   // practice, UNIXy targets tend to inherit the algorithm described
01436   // in the System V generic ABI.  The basic bitfield layout rule in
01437   // System V is to place bitfields at the next available bit offset
01438   // where the entire bitfield would fit in an aligned storage unit of
01439   // the declared type; it's okay if an earlier or later non-bitfield
01440   // is allocated in the same storage unit.  However, some targets
01441   // (those that !useBitFieldTypeAlignment(), e.g. ARM APCS) don't
01442   // require this storage unit to be aligned, and therefore always put
01443   // the bitfield at the next available bit offset.
01444 
01445   // ms_struct basically requests a complete replacement of the
01446   // platform ABI's struct-layout algorithm, with the high-level goal
01447   // of duplicating MSVC's layout.  For non-bitfields, this follows
01448   // the standard algorithm.  The basic bitfield layout rule is to
01449   // allocate an entire unit of the bitfield's declared type
01450   // (e.g. 'unsigned long'), then parcel it up among successive
01451   // bitfields whose declared types have the same size, making a new
01452   // unit as soon as the last can no longer store the whole value.
01453   // Since it completely replaces the platform ABI's algorithm,
01454   // settings like !useBitFieldTypeAlignment() do not apply.
01455 
01456   // A zero-width bitfield forces the use of a new storage unit for
01457   // later bitfields.  In general, this occurs by rounding up the
01458   // current size of the struct as if the algorithm were about to
01459   // place a non-bitfield of the field's formal type.  Usually this
01460   // does not change the alignment of the struct itself, but it does
01461   // on some targets (those that useZeroLengthBitfieldAlignment(),
01462   // e.g. ARM).  In ms_struct layout, zero-width bitfields are
01463   // ignored unless they follow a non-zero-width bitfield.
01464 
01465   // A field alignment restriction (e.g. from #pragma pack) or
01466   // specification (e.g. from __attribute__((aligned))) changes the
01467   // formal alignment of the field.  For System V, this alters the
01468   // required alignment of the notional storage unit that must contain
01469   // the bitfield.  For ms_struct, this only affects the placement of
01470   // new storage units.  In both cases, the effect of #pragma pack is
01471   // ignored on zero-width bitfields.
01472 
01473   // On System V, a packed field (e.g. from #pragma pack or
01474   // __attribute__((packed))) always uses the next available bit
01475   // offset.
01476 
01477   // In an ms_struct struct, the alignment of a fundamental type is
01478   // always equal to its size.  This is necessary in order to mimic
01479   // the i386 alignment rules on targets which might not fully align
01480   // all types (e.g. Darwin PPC32, where alignof(long long) == 4).
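  // For illustration only (a sketch, assuming an x86_64 System V target where
  // int is 32 bits wide):
  //
  //   struct SysV { char a; int b : 10; };
  //   // 'b' starts at bit offset 8: the whole 10-bit field still fits in the
  //   // aligned 4-byte 'int' storage unit [0, 32), so no padding is inserted
  //   // and sizeof(SysV) == 4.
  //
  //   struct __attribute__((ms_struct)) MS { char a; int b : 10; };
  //   // Under ms_struct a full 'int' storage unit is allocated for 'b' at the
  //   // next int-aligned offset, so 'b' starts at bit offset 32 and
  //   // sizeof(MS) == 8.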
01481 
01482   // First, some simple bookkeeping to perform for ms_struct structs.
01483   if (IsMsStruct) {
01484     // The field alignment for integer types is always the size.
01485     FieldAlign = TypeSize;
01486 
01487     // If the previous field was not a bitfield, or was a bitfield
01488     // with a different storage unit size, we're done with that
01489     // storage unit.
01490     if (LastBitfieldTypeSize != TypeSize) {
01491       // Also, ignore zero-length bitfields after non-bitfields.
01492       if (!LastBitfieldTypeSize && !FieldSize)
01493         FieldAlign = 1;
01494 
01495       UnfilledBitsInLastUnit = 0;
01496       LastBitfieldTypeSize = 0;
01497     }
01498   }
01499 
01500   // If the field is wider than its declared type, it follows
01501   // different rules in all cases.
01502   if (FieldSize > TypeSize) {
01503     LayoutWideBitField(FieldSize, TypeSize, FieldPacked, D);
01504     return;
01505   }
01506 
01507   // Compute the next available bit offset.
01508   uint64_t FieldOffset =
01509     IsUnion ? 0 : (getDataSizeInBits() - UnfilledBitsInLastUnit);
01510 
01511   // Handle targets that don't honor bitfield type alignment.
01512   if (!IsMsStruct && !Context.getTargetInfo().useBitFieldTypeAlignment()) {
01513     // Some such targets do honor it on zero-width bitfields.
01514     if (FieldSize == 0 &&
01515         Context.getTargetInfo().useZeroLengthBitfieldAlignment()) {
01516       // The alignment to round up to is the max of the field's natural
01517       // alignment and a target-specific fixed value (sometimes zero).
01518       unsigned ZeroLengthBitfieldBoundary =
01519         Context.getTargetInfo().getZeroLengthBitfieldBoundary();
01520       FieldAlign = std::max(FieldAlign, ZeroLengthBitfieldBoundary);
01521 
01522     // If that doesn't apply, just ignore the field alignment.
01523     } else {
01524       FieldAlign = 1;
01525     }
01526   }
01527 
01528   // Remember the alignment we would have used if the field were not packed.
01529   unsigned UnpackedFieldAlign = FieldAlign;
01530 
01531   // Ignore the field alignment if the field is packed unless it has zero-size.
01532   if (!IsMsStruct && FieldPacked && FieldSize != 0)
01533     FieldAlign = 1;
01534 
01535   // But, if there's an 'aligned' attribute on the field, honor that.
01536   if (unsigned ExplicitFieldAlign = D->getMaxAlignment()) {
01537     FieldAlign = std::max(FieldAlign, ExplicitFieldAlign);
01538     UnpackedFieldAlign = std::max(UnpackedFieldAlign, ExplicitFieldAlign);
01539   }
01540 
01541   // But, if there's a #pragma pack in play, that takes precedence over
01542   // even the 'aligned' attribute, for non-zero-width bitfields.
01543   if (!MaxFieldAlignment.isZero() && FieldSize) {
01544     unsigned MaxFieldAlignmentInBits = Context.toBits(MaxFieldAlignment);
01545     FieldAlign = std::min(FieldAlign, MaxFieldAlignmentInBits);
01546     UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignmentInBits);
01547   }
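  // A small example of the packed case (a sketch, assuming an x86_64
  // System V target):
  //
  //   struct P { int a : 30; int b : 10; };                          // sizeof == 8
  //   struct __attribute__((packed)) Q { int a : 30; int b : 10; };  // sizeof == 5
  //
  // Without 'packed', 'b' cannot fit in the two bits remaining in the first
  // 32-bit unit, so it is pushed to bit offset 32.  With 'packed', 'b' simply
  // takes the next available bit offset (30), and the 40 bits of data round
  // up to 5 chars.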
01548 
01549   // For purposes of diagnostics, we're going to simultaneously
01550   // compute the field offsets that we would have used if we weren't
01551   // adding any alignment padding or if the field weren't packed.
01552   uint64_t UnpaddedFieldOffset = FieldOffset;
01553   uint64_t UnpackedFieldOffset = FieldOffset;
01554 
01555   // Check if we need to add padding to fit the bitfield within an
01556   // allocation unit with the right size and alignment.  The rules are
01557   // somewhat different here for ms_struct structs.
01558   if (IsMsStruct) {
01559     // If it's not a zero-width bitfield, and we can fit the bitfield
01560     // into the active storage unit (and we haven't already decided to
01561     // start a new storage unit), just do so, regardless of any other
01562     // consideration.  Otherwise, round up to the right alignment.
01563     if (FieldSize == 0 || FieldSize > UnfilledBitsInLastUnit) {
01564       FieldOffset = llvm::RoundUpToAlignment(FieldOffset, FieldAlign);
01565       UnpackedFieldOffset = llvm::RoundUpToAlignment(UnpackedFieldOffset,
01566                                                      UnpackedFieldAlign);
01567       UnfilledBitsInLastUnit = 0;
01568     }
01569 
01570   } else {
01571     // #pragma pack, with any value, suppresses the insertion of padding.
01572     bool AllowPadding = MaxFieldAlignment.isZero();
01573 
01574     // Compute the real offset.
01575     if (FieldSize == 0 || 
01576         (AllowPadding &&
01577          (FieldOffset & (FieldAlign-1)) + FieldSize > TypeSize)) {
01578       FieldOffset = llvm::RoundUpToAlignment(FieldOffset, FieldAlign);
01579     }
01580 
01581     // Repeat the computation for diagnostic purposes.
01582     if (FieldSize == 0 ||
01583         (AllowPadding &&
01584          (UnpackedFieldOffset & (UnpackedFieldAlign-1)) + FieldSize > TypeSize))
01585       UnpackedFieldOffset = llvm::RoundUpToAlignment(UnpackedFieldOffset,
01586                                                      UnpackedFieldAlign);
01587   }
01588 
01589   // If we're using external layout, give the external layout a chance
01590   // to override this information.
01591   if (ExternalLayout)
01592     FieldOffset = updateExternalFieldOffset(D, FieldOffset);
01593 
01594   // Okay, place the bitfield at the calculated offset.
01595   FieldOffsets.push_back(FieldOffset);
01596 
01597   // Bookkeeping:
01598 
01599   // Anonymous members don't affect the overall record alignment,
01600   // except on targets where they do.
01601   if (!IsMsStruct &&
01602       !Context.getTargetInfo().useZeroLengthBitfieldAlignment() &&
01603       !D->getIdentifier())
01604     FieldAlign = UnpackedFieldAlign = 1;
01605 
01606   // Diagnose differences in layout due to padding or packing.
01607   if (!ExternalLayout)
01608     CheckFieldPadding(FieldOffset, UnpaddedFieldOffset, UnpackedFieldOffset,
01609                       UnpackedFieldAlign, FieldPacked, D);
01610 
01611   // Update DataSize to include the last byte containing (part of) the bitfield.
01612 
01613   // For unions, this is just a max operation, as usual.
01614   if (IsUnion) {
01615     uint64_t RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize,
01616                                                            Context);
01617     setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize));
01618   // For non-zero-width bitfields in ms_struct structs, allocate a new
01619   // storage unit if necessary.
01620   } else if (IsMsStruct && FieldSize) {
01621     // We should have cleared UnfilledBitsInLastUnit in every case
01622     // where we changed storage units.
01623     if (!UnfilledBitsInLastUnit) {
01624       setDataSize(FieldOffset + TypeSize);
01625       UnfilledBitsInLastUnit = TypeSize;
01626     }
01627     UnfilledBitsInLastUnit -= FieldSize;
01628     LastBitfieldTypeSize = TypeSize;
01629 
01630   // Otherwise, bump the data size up to include the bitfield,
01631   // including padding up to char alignment, and then remember how many
01632   // bits we didn't use.
01633   } else {
01634     uint64_t NewSizeInBits = FieldOffset + FieldSize;
01635     uint64_t CharAlignment = Context.getTargetInfo().getCharAlign();
01636     setDataSize(llvm::RoundUpToAlignment(NewSizeInBits, CharAlignment));
01637     UnfilledBitsInLastUnit = getDataSizeInBits() - NewSizeInBits;
01638 
01639     // The only time we can get here for an ms_struct is if this is a
01640     // zero-width bitfield, which doesn't count as anything for the
01641     // purposes of unfilled bits.
01642     LastBitfieldTypeSize = 0;
01643   }
01644 
01645   // Update the size.
01646   setSize(std::max(getSizeInBits(), getDataSizeInBits()));
01647 
01648   // Remember max struct/class alignment.
01649   UpdateAlignment(Context.toCharUnitsFromBits(FieldAlign), 
01650                   Context.toCharUnitsFromBits(UnpackedFieldAlign));
01651 }
01652 
01653 void RecordLayoutBuilder::LayoutField(const FieldDecl *D,
01654                                       bool InsertExtraPadding) {
01655   if (D->isBitField()) {
01656     LayoutBitField(D);
01657     return;
01658   }
01659 
01660   uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;
01661 
01662   // Reset the unfilled bits.
01663   UnfilledBitsInLastUnit = 0;
01664   LastBitfieldTypeSize = 0;
01665 
01666   bool FieldPacked = Packed || D->hasAttr<PackedAttr>();
01667   CharUnits FieldOffset = 
01668     IsUnion ? CharUnits::Zero() : getDataSize();
01669   CharUnits FieldSize;
01670   CharUnits FieldAlign;
01671 
01672   if (D->getType()->isIncompleteArrayType()) {
01673     // This is a flexible array member; we can't directly
01674     // query getTypeInfo about these, so we figure it out here.
01675     // Flexible array members don't have any size, but they
01676     // have to be aligned appropriately for their element type.
01677     FieldSize = CharUnits::Zero();
01678     const ArrayType* ATy = Context.getAsArrayType(D->getType());
01679     FieldAlign = Context.getTypeAlignInChars(ATy->getElementType());
01680   } else if (const ReferenceType *RT = D->getType()->getAs<ReferenceType>()) {
01681     unsigned AS = RT->getPointeeType().getAddressSpace();
01682     FieldSize = 
01683       Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(AS));
01684     FieldAlign = 
01685       Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerAlign(AS));
01686   } else {
01687     std::pair<CharUnits, CharUnits> FieldInfo = 
01688       Context.getTypeInfoInChars(D->getType());
01689     FieldSize = FieldInfo.first;
01690     FieldAlign = FieldInfo.second;
01691 
01692     if (IsMsStruct) {
01693       // If MS bitfield layout is required, figure out what type is being
01694       // laid out and align the field to the width of that type.
01695       
01696       // Resolve all typedefs down to their base type and round up the field
01697       // alignment if necessary.
01698       QualType T = Context.getBaseElementType(D->getType());
01699       if (const BuiltinType *BTy = T->getAs<BuiltinType>()) {
01700         CharUnits TypeSize = Context.getTypeSizeInChars(BTy);
01701         if (TypeSize > FieldAlign)
01702           FieldAlign = TypeSize;
01703       }
01704     }
01705   }
01706 
01707   // The alignment the field would have if it were not packed. This is used
01708   // to check whether the packed attribute was unnecessary (-Wpacked).
01709   CharUnits UnpackedFieldAlign = FieldAlign;
01710   CharUnits UnpackedFieldOffset = FieldOffset;
01711 
01712   if (FieldPacked)
01713     FieldAlign = CharUnits::One();
01714   CharUnits MaxAlignmentInChars = 
01715     Context.toCharUnitsFromBits(D->getMaxAlignment());
01716   FieldAlign = std::max(FieldAlign, MaxAlignmentInChars);
01717   UnpackedFieldAlign = std::max(UnpackedFieldAlign, MaxAlignmentInChars);
01718 
01719   // The maximum field alignment overrides the aligned attribute.
01720   if (!MaxFieldAlignment.isZero()) {
01721     FieldAlign = std::min(FieldAlign, MaxFieldAlignment);
01722     UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignment);
01723   }
01724 
01725   // Round up the current record size to the field's alignment boundary.
01726   FieldOffset = FieldOffset.RoundUpToAlignment(FieldAlign);
01727   UnpackedFieldOffset = 
01728     UnpackedFieldOffset.RoundUpToAlignment(UnpackedFieldAlign);
01729 
01730   if (ExternalLayout) {
01731     FieldOffset = Context.toCharUnitsFromBits(
01732                     updateExternalFieldOffset(D, Context.toBits(FieldOffset)));
01733     
01734     if (!IsUnion && EmptySubobjects) {
01735       // Record the fact that we're placing a field at this offset.
01736       bool Allowed = EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset);
01737       (void)Allowed;
01738       assert(Allowed && "Externally-placed field cannot be placed here");      
01739     }
01740   } else {
01741     if (!IsUnion && EmptySubobjects) {
01742       // Check if we can place the field at this offset.
01743       while (!EmptySubobjects->CanPlaceFieldAtOffset(D, FieldOffset)) {
01744         // We couldn't place the field at the offset. Try again at a new offset.
01745         FieldOffset += FieldAlign;
01746       }
01747     }
01748   }
01749   
01750   // Place this field at the current location.
01751   FieldOffsets.push_back(Context.toBits(FieldOffset));
01752 
01753   if (!ExternalLayout)
01754     CheckFieldPadding(Context.toBits(FieldOffset), UnpaddedFieldOffset, 
01755                       Context.toBits(UnpackedFieldOffset),
01756                       Context.toBits(UnpackedFieldAlign), FieldPacked, D);
01757 
01758   if (InsertExtraPadding) {
01759     CharUnits ASanAlignment = CharUnits::fromQuantity(8);
01760     CharUnits ExtraSizeForAsan = ASanAlignment;
01761     if (FieldSize % ASanAlignment)
01762       ExtraSizeForAsan +=
01763           ASanAlignment - CharUnits::fromQuantity(FieldSize % ASanAlignment);
01764     FieldSize += ExtraSizeForAsan;
01765   }
01766 
01767   // Reserve space for this field.
01768   uint64_t FieldSizeInBits = Context.toBits(FieldSize);
01769   if (IsUnion)
01770     setDataSize(std::max(getDataSizeInBits(), FieldSizeInBits));
01771   else
01772     setDataSize(FieldOffset + FieldSize);
01773 
01774   // Update the size.
01775   setSize(std::max(getSizeInBits(), getDataSizeInBits()));
01776 
01777   // Remember max struct/class alignment.
01778   UpdateAlignment(FieldAlign, UnpackedFieldAlign);
01779 }
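// Two quick examples of the non-bit-field cases above (a sketch, assuming a
// typical x86_64 Itanium-ABI target):
//
//   struct S { int n; char tail[]; };   // flexible array member
//   // 'tail' contributes no size but is aligned like its element type, so
//   // sizeof(S) == 4.
//
//   struct R { int &r; };               // reference member
//   // 'r' is laid out like a pointer: 8 bytes with 8-byte alignment, so
//   // sizeof(R) == 8.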
01780 
01781 void RecordLayoutBuilder::FinishLayout(const NamedDecl *D) {
01782   // In C++, records cannot be of size 0.
01783   if (Context.getLangOpts().CPlusPlus && getSizeInBits() == 0) {
01784     if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
01785       // Compatibility with gcc requires that a class (POD or non-POD)
01786       // which is not empty but has size 0 (for example, one whose only
01787       // fields are zero-length arrays) remains of size 0.
01788       if (RD->isEmpty())
01789         setSize(CharUnits::One());
01790     }
01791     else
01792       setSize(CharUnits::One());
01793   }
01794 
01795   // Finally, round the size of the record up to the alignment of the
01796   // record itself.
01797   uint64_t UnpaddedSize = getSizeInBits() - UnfilledBitsInLastUnit;
01798   uint64_t UnpackedSizeInBits =
01799   llvm::RoundUpToAlignment(getSizeInBits(),
01800                            Context.toBits(UnpackedAlignment));
01801   CharUnits UnpackedSize = Context.toCharUnitsFromBits(UnpackedSizeInBits);
01802   uint64_t RoundedSize
01803     = llvm::RoundUpToAlignment(getSizeInBits(), Context.toBits(Alignment));
01804 
01805   if (ExternalLayout) {
01806     // If we're inferring alignment, and the external size is smaller than
01807     // our size after we've rounded up to alignment, conservatively set the
01808     // alignment to 1.
01809     if (InferAlignment && ExternalSize < RoundedSize) {
01810       Alignment = CharUnits::One();
01811       InferAlignment = false;
01812     }
01813     setSize(ExternalSize);
01814     return;
01815   }
01816 
01817   // Set the size to the final size.
01818   setSize(RoundedSize);
01819 
01820   unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
01821   if (const RecordDecl *RD = dyn_cast<RecordDecl>(D)) {
01822     // Warn if padding was introduced to the struct/class/union.
01823     if (getSizeInBits() > UnpaddedSize) {
01824       unsigned PadSize = getSizeInBits() - UnpaddedSize;
01825       bool InBits = true;
01826       if (PadSize % CharBitNum == 0) {
01827         PadSize = PadSize / CharBitNum;
01828         InBits = false;
01829       }
01830       Diag(RD->getLocation(), diag::warn_padded_struct_size)
01831           << Context.getTypeDeclType(RD)
01832           << PadSize
01833           << (InBits ? 1 : 0) /*(byte|bit)*/ << (PadSize > 1); // plural or not
01834     }
01835 
01836     // Warn if we packed it unnecessarily. If the alignment is 1 byte don't
01837     // bother since there won't be alignment issues.
01838     if (Packed && UnpackedAlignment > CharUnits::One() && 
01839         getSize() == UnpackedSize)
01840       Diag(D->getLocation(), diag::warn_unnecessary_packed)
01841           << Context.getTypeDeclType(RD);
01842   }
01843 }
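// Illustrating the rules above (a sketch, assuming an x86_64 target):
//
//   struct Empty {};                   // C++: sizeof(Empty) == 1, never 0.
//
//   struct Padded { int i; char c; };
//   // 5 bytes of data round up to the 4-byte alignment: sizeof == 8, and
//   // -Wpadded reports the 3 bytes of tail padding.
//
//   struct __attribute__((packed)) P { int i; };
//   // The layout is unchanged by 'packed', so -Wpacked reports that the
//   // attribute is unnecessary.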
01844 
01845 void RecordLayoutBuilder::UpdateAlignment(CharUnits NewAlignment,
01846                                           CharUnits UnpackedNewAlignment) {
01847   // The alignment is not modified when using 'mac68k' alignment or when
01848   // we have an externally-supplied layout that also provides overall alignment.
01849   if (IsMac68kAlign || (ExternalLayout && !InferAlignment))
01850     return;
01851 
01852   if (NewAlignment > Alignment) {
01853     assert(llvm::isPowerOf2_32(NewAlignment.getQuantity()) &&
01854            "Alignment not a power of 2");
01855     Alignment = NewAlignment;
01856   }
01857 
01858   if (UnpackedNewAlignment > UnpackedAlignment) {
01859     assert(llvm::isPowerOf2_32(UnpackedNewAlignment.getQuantity()) &&
01860            "Alignment not a power of 2");
01861     UnpackedAlignment = UnpackedNewAlignment;
01862   }
01863 }
01864 
01865 uint64_t
01866 RecordLayoutBuilder::updateExternalFieldOffset(const FieldDecl *Field, 
01867                                                uint64_t ComputedOffset) {
01868   assert(ExternalFieldOffsets.find(Field) != ExternalFieldOffsets.end() &&
01869          "Field does not have an external offset");
01870   
01871   uint64_t ExternalFieldOffset = ExternalFieldOffsets[Field];
01872   
01873   if (InferAlignment && ExternalFieldOffset < ComputedOffset) {
01874     // The externally-supplied field offset is before the field offset we
01875     // computed. Assume that the structure is packed.
01876     Alignment = CharUnits::One();
01877     InferAlignment = false;
01878   }
01879   
01880   // Use the externally-supplied field offset.
01881   return ExternalFieldOffset;
01882 }
01883 
01884 /// \brief Get diagnostic %select index for tag kind for
01885 /// field padding diagnostic message.
01886 /// WARNING: Indexes apply to particular diagnostics only!
01887 ///
01888 /// \returns diagnostic %select index.
01889 static unsigned getPaddingDiagFromTagKind(TagTypeKind Tag) {
01890   switch (Tag) {
01891   case TTK_Struct: return 0;
01892   case TTK_Interface: return 1;
01893   case TTK_Class: return 2;
01894   default: llvm_unreachable("Invalid tag kind for field padding diagnostic!");
01895   }
01896 }
01897 
01898 void RecordLayoutBuilder::CheckFieldPadding(uint64_t Offset,
01899                                             uint64_t UnpaddedOffset,
01900                                             uint64_t UnpackedOffset,
01901                                             unsigned UnpackedAlign,
01902                                             bool isPacked,
01903                                             const FieldDecl *D) {
01904   // We let ObjC ivars through without warning; ObjC interfaces are generally
01905   // not used for padding tricks.
01906   if (isa<ObjCIvarDecl>(D))
01907     return;
01908 
01909   // Don't warn about structs created without a SourceLocation.  This can
01910   // be done by clients of the AST, such as codegen.
01911   if (D->getLocation().isInvalid())
01912     return;
01913   
01914   unsigned CharBitNum = Context.getTargetInfo().getCharWidth();
01915 
01916   // Warn if padding was introduced to the struct/class.
01917   if (!IsUnion && Offset > UnpaddedOffset) {
01918     unsigned PadSize = Offset - UnpaddedOffset;
01919     bool InBits = true;
01920     if (PadSize % CharBitNum == 0) {
01921       PadSize = PadSize / CharBitNum;
01922       InBits = false;
01923     }
01924     if (D->getIdentifier())
01925       Diag(D->getLocation(), diag::warn_padded_struct_field)
01926           << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
01927           << Context.getTypeDeclType(D->getParent())
01928           << PadSize
01929           << (InBits ? 1 : 0) /*(byte|bit)*/ << (PadSize > 1) // plural or not
01930           << D->getIdentifier();
01931     else
01932       Diag(D->getLocation(), diag::warn_padded_struct_anon_field)
01933           << getPaddingDiagFromTagKind(D->getParent()->getTagKind())
01934           << Context.getTypeDeclType(D->getParent())
01935           << PadSize
01936           << (InBits ? 1 : 0) /*(byte|bit)*/ << (PadSize > 1); // plural or not
01937   }
01938 
01939   // Warn if we packed it unnecessarily. If the alignment is 1 byte don't
01940   // bother since there won't be alignment issues.
01941   if (isPacked && UnpackedAlign > CharBitNum && Offset == UnpackedOffset)
01942     Diag(D->getLocation(), diag::warn_unnecessary_packed)
01943         << D->getIdentifier();
01944 }
01945 
01946 static const CXXMethodDecl *computeKeyFunction(ASTContext &Context,
01947                                                const CXXRecordDecl *RD) {
01948   // If a class isn't polymorphic it doesn't have a key function.
01949   if (!RD->isPolymorphic())
01950     return nullptr;
01951 
01952   // A class that is not externally visible doesn't have a key function. (Or
01953   // at least, there's no point to assigning a key function to such a class;
01954   // this doesn't affect the ABI.)
01955   if (!RD->isExternallyVisible())
01956     return nullptr;
01957 
01958   // Template instantiations don't have key functions per Itanium C++ ABI 5.2.6.
01959   // Same behavior as GCC.
01960   TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
01961   if (TSK == TSK_ImplicitInstantiation ||
01962       TSK == TSK_ExplicitInstantiationDeclaration ||
01963       TSK == TSK_ExplicitInstantiationDefinition)
01964     return nullptr;
01965 
01966   bool allowInlineFunctions =
01967     Context.getTargetInfo().getCXXABI().canKeyFunctionBeInline();
01968 
01969   for (const CXXMethodDecl *MD : RD->methods()) {
01970     if (!MD->isVirtual())
01971       continue;
01972 
01973     if (MD->isPure())
01974       continue;
01975 
01976     // Ignore implicit member functions; they are always marked as inline, but
01977     // they don't have a body until they're defined.
01978     if (MD->isImplicit())
01979       continue;
01980 
01981     if (MD->isInlineSpecified())
01982       continue;
01983 
01984     if (MD->hasInlineBody())
01985       continue;
01986 
01987     // Ignore inline deleted or defaulted functions.
01988     if (!MD->isUserProvided())
01989       continue;
01990 
01991     // In certain ABIs, ignore functions with out-of-line inline definitions.
01992     if (!allowInlineFunctions) {
01993       const FunctionDecl *Def;
01994       if (MD->hasBody(Def) && Def->isInlineSpecified())
01995         continue;
01996     }
01997 
01998     // We found it.
01999     return MD;
02000   }
02001 
02002   return nullptr;
02003 }
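// For example (a sketch of the rules above):
//
//   struct A { virtual void f(); };    // 'f' is A's key function: the first
//                                      // non-pure, non-inline virtual member
//                                      // function.
//   struct B { virtual void g() {} };  // every virtual function is inline, so
//                                      // B has no key function and its vtable
//                                      // is emitted wherever it is needed.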
02004 
02005 DiagnosticBuilder
02006 RecordLayoutBuilder::Diag(SourceLocation Loc, unsigned DiagID) {
02007   return Context.getDiagnostics().Report(Loc, DiagID);
02008 }
02009 
02010 /// Does the target C++ ABI require us to skip over the tail-padding
02011 /// of the given class (considering it as a base class) when allocating
02012 /// objects?
02013 static bool mustSkipTailPadding(TargetCXXABI ABI, const CXXRecordDecl *RD) {
02014   switch (ABI.getTailPaddingUseRules()) {
02015   case TargetCXXABI::AlwaysUseTailPadding:
02016     return false;
02017 
02018   case TargetCXXABI::UseTailPaddingUnlessPOD03:
02019     // FIXME: To the extent that this is meant to cover the Itanium ABI
02020     // rules, we should implement the restrictions about over-sized
02021     // bitfields:
02022     //
02023     // http://mentorembedded.github.com/cxx-abi/abi.html#POD :
02024     //   In general, a type is considered a POD for the purposes of
02025     //   layout if it is a POD type (in the sense of ISO C++
02026     //   [basic.types]). However, a POD-struct or POD-union (in the
02027     //   sense of ISO C++ [class]) with a bitfield member whose
02028     //   declared width is wider than the declared type of the
02029     //   bitfield is not a POD for the purpose of layout.  Similarly,
02030     //   an array type is not a POD for the purpose of layout if the
02031     //   element type of the array is not a POD for the purpose of
02032     //   layout.
02033     //
02034     //   Where references to the ISO C++ are made in this paragraph,
02035     //   the Technical Corrigendum 1 version of the standard is
02036     //   intended.
02037     return RD->isPOD();
02038 
02039   case TargetCXXABI::UseTailPaddingUnlessPOD11:
02040     // This is equivalent to RD->getTypeForDecl().isCXX11PODType(),
02041     // but with a lot of abstraction penalty stripped off.  This does
02042     // assume that these properties are set correctly even in C++98
02043     // mode; fortunately, that is true because we want to assign
02044     // consistent semantics to the type-traits intrinsics (or at
02045     // least as many of them as possible).
02046     return RD->isTrivial() && RD->isStandardLayout();
02047   }
02048 
02049   llvm_unreachable("bad tail-padding use kind");
02050 }
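// A concrete illustration of the POD03 rule (a sketch, assuming a typical
// Itanium-ABI target):
//
//   struct A { int i; char c; };   // POD: sizeof(A) == 8, 3 bytes of tail padding
//   struct B : A { char d; };      // A's tail padding may not be reused, so 'd'
//                                  // lands at offset 8 and sizeof(B) == 12.
//
// If A were non-POD (say, it had a user-declared constructor), the ABI would
// allow 'd' to be placed into A's tail padding at offset 5, giving
// sizeof(B) == 8.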
02051 
02052 static bool isMsLayout(const RecordDecl* D) {
02053   return D->getASTContext().getTargetInfo().getCXXABI().isMicrosoft();
02054 }
02055 
02056 // This section contains an implementation of struct layout that is, up to the
02057 // included tests, compatible with cl.exe (2013).  The layout produced is
02058 // significantly different than those produced by the Itanium ABI.  Here we note
02059 // the most important differences.
02060 //
02061 // * The alignment of bitfields in unions is ignored when computing the
02062 //   alignment of the union.
02063 // * A zero-width bitfield that occurs after anything other than a non-zero
02064 //   length bitfield is ignored.
02065 // * There is no explicit primary base for the purposes of layout.  All bases
02066 //   with vfptrs are laid out first, followed by all bases without vfptrs.
02067 // * The Itanium equivalent vtable pointers are split into a vfptr (virtual
02068 //   function pointer) and a vbptr (virtual base pointer).  They can each be
02069 //   shared with a non-virtual base. These bases need not be the same.  vfptrs
02070 //   always occur at offset 0.  vbptrs can occur at an arbitrary offset and are
02071 //   placed after the lexicographically last non-virtual base.  This placement
02072 //   is always before fields but can be in the middle of the non-virtual bases
02073 //   due to the two-pass layout scheme for non-virtual-bases.
02074 // * Virtual bases sometimes require a 'vtordisp' field that is laid out before
02075 //   the virtual base and is used in conjunction with virtual overrides during
02076 //   construction and destruction.  This is always a 4 byte value and is used as
02077 //   an alternative to constructor vtables.
02078 // * vtordisps are allocated in a block of memory with size and alignment equal
02079 //   to the alignment of the completed structure (before applying __declspec(
02080 //   align())).  The vtordisps always occur at the end of the allocation block,
02081 //   immediately prior to the virtual base.
02082 // * vfptrs are injected after all bases and fields have been laid out.  In
02083 //   order to guarantee proper alignment of all fields, the vfptr injection
02084 //   pushes all bases and fields back by the alignment imposed by those bases
02085 //   and fields.  This can potentially add a significant amount of padding.
02086 //   vfptrs are always injected at offset 0.
02087 // * vbptrs are injected after all bases and fields have been laid out.  In
02088 //   order to guarantee proper alignment of all fields, the vbptr injection
02089 //   pushes all bases and fields back by the alignment imposed by those bases
02090 //   and fields.  This can potentially add a significant amount of padding.
02091 //   vbptrs are injected immediately after the last non-virtual base as
02092 //   lexicographically ordered in the code.  If this site isn't pointer aligned
02093 //   the vbptr is placed at the next properly aligned location.  Enough padding
02094 //   is added to guarantee a fit.
02095 // * The last zero sized non-virtual base can be placed at the end of the
02096 //   struct (potentially aliasing another object), or may alias with the first
02097 //   field, even if they are of the same type.
02098 // * The last zero size virtual base may be placed at the end of the struct
02099 //   potentially aliasing another object.
02100 // * The ABI attempts to avoid aliasing of zero sized bases by adding padding
02101 //   between bases or vbases with specific properties.  The criteria for
02102 //   additional padding between two bases is that the first base is zero sized
02103 //   or ends with a zero sized subobject and the second base is zero sized or
02104 //   trails with a zero sized base or field (sharing of vfptrs can reorder the
02105 //   layout of the bases so the leading base is not always the first one declared).
02106 //   This rule does take into account fields that are not records, so padding
02107 //   will occur even if the last field is, e.g. an int. The padding added for
02108 //   bases is 1 byte.  The padding added between vbases depends on the alignment
02109 //   of the object but is at least 4 bytes (in both 32 and 64 bit modes).
02110 // * There is no separate concept of non-virtual alignment; non-virtual
02111 //   alignment and alignment are always identical.
02112 // * There is a distinction between alignment and required alignment.
02113 //   __declspec(align) changes the required alignment of a struct.  This
02114 //   alignment is _always_ obeyed, even in the presence of #pragma pack. A
02115 //   record inherits required alignment from all of its fields and bases.
02116 // * __declspec(align) on bitfields has the effect of changing the bitfield's
02117 //   alignment instead of its required alignment.  This is the only known way
02118 //   to make the alignment of a struct bigger than 8.  Interestingly enough
02119 //   this alignment is also immune to the effects of #pragma pack and can be
02120 //   used to create structures with large alignment under #pragma pack.
02121 //   However, because it does not impact required alignment, such a structure,
02122 //   when used as a field or base, will not be aligned if #pragma pack is
02123 //   still active at the time of use.
02124 //
02125 // Known incompatibilities:
02126 // * all: #pragma pack between fields in a record
02127 // * 2010 and back: If the last field in a record is a bitfield, every object
02128 //   laid out after the record will have extra padding inserted before it.  The
02129 //   extra padding will have size equal to the size of the storage class of the
02130 //   bitfield.  0 sized bitfields don't exhibit this behavior and the extra
02131 //   padding can be avoided by adding a 0 sized bitfield after the non-zero-
02132 //   sized bitfield.
02133 // * 2012 and back: In 64-bit mode, if the alignment of a record is 16 or
02134 //   greater due to __declspec(align()) then a second layout phase occurs after
02135 //   the locations of the vf and vb pointers are known.  This layout phase
02136 //   suffers from the "last field is a bitfield" bug in 2010 and results in
02137 //   _every_ field getting padding put in front of it, potentially including the
02138 //   vfptr, leaving the vfptr at a non-zero location which results in a fault if
02139 //   anything tries to read the vftbl.  The second layout phase also treats
02140 //   bitfields as separate entities and gives them each storage rather than
02141 //   packing them.  Additionally, because this phase appears to perform an
02142 //   (unstable) sort on the members before laying them out and because merged
02143 //   bitfields have the same address, the bitfields end up in whatever order
02144 //   the sort left them in, a behavior we could never hope to replicate.
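// As one concrete consequence of the vfptr-injection rule (a sketch, assuming
// 32-bit cl.exe-compatible layout):
//
//   struct A { virtual void f(); double d; };
//   // The 4-byte vfptr is injected at offset 0 after 'd' has been laid out;
//   // to keep 'd' 8-byte aligned, bases and fields are pushed back by the
//   // full alignment, so 'd' ends up at offset 8 and sizeof(A) == 16.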
02145 
02146 namespace {
02147 struct MicrosoftRecordLayoutBuilder {
02148   struct ElementInfo {
02149     CharUnits Size;
02150     CharUnits Alignment;
02151   };
02152   typedef llvm::DenseMap<const CXXRecordDecl *, CharUnits> BaseOffsetsMapTy;
02153   MicrosoftRecordLayoutBuilder(const ASTContext &Context) : Context(Context) {}
02154 private:
02155   MicrosoftRecordLayoutBuilder(const MicrosoftRecordLayoutBuilder &)
02156   LLVM_DELETED_FUNCTION;
02157   void operator=(const MicrosoftRecordLayoutBuilder &) LLVM_DELETED_FUNCTION;
02158 public:
02159   void layout(const RecordDecl *RD);
02160   void cxxLayout(const CXXRecordDecl *RD);
02161   /// \brief Initializes size and alignment and honors some flags.
02162   void initializeLayout(const RecordDecl *RD);
02163   /// \brief Initializes C++ layout, computing alignment and virtual alignment and
02164   /// the existence of vfptrs and vbptrs.  Alignment is needed before the vfptr is
02165   /// laid out.
02166   void initializeCXXLayout(const CXXRecordDecl *RD);
02167   void layoutNonVirtualBases(const CXXRecordDecl *RD);
02168   void layoutNonVirtualBase(const CXXRecordDecl *BaseDecl,
02169                             const ASTRecordLayout &BaseLayout,
02170                             const ASTRecordLayout *&PreviousBaseLayout);
02171   void injectVFPtr(const CXXRecordDecl *RD);
02172   void injectVBPtr(const CXXRecordDecl *RD);
02173   /// \brief Lays out the fields of the record.  Also rounds size up to
02174   /// alignment.
02175   void layoutFields(const RecordDecl *RD);
02176   void layoutField(const FieldDecl *FD);
02177   void layoutBitField(const FieldDecl *FD);
02178   /// \brief Lays out a single zero-width bit-field in the record and handles
02179   /// special cases associated with zero-width bit-fields.
02180   void layoutZeroWidthBitField(const FieldDecl *FD);
02181   void layoutVirtualBases(const CXXRecordDecl *RD);
02182   void finalizeLayout(const RecordDecl *RD);
02183   /// \brief Gets the size and alignment of a base taking pragma pack and
02184   /// __declspec(align) into account.
02185   ElementInfo getAdjustedElementInfo(const ASTRecordLayout &Layout);
02186   /// \brief Gets the size and alignment of a field taking pragma pack and
02187   /// __declspec(align) into account.  It also updates RequiredAlignment as a
02188   /// side effect because it is most convenient to do so here.
02189   ElementInfo getAdjustedElementInfo(const FieldDecl *FD);
02190   /// \brief Places a field at an offset in CharUnits.
02191   void placeFieldAtOffset(CharUnits FieldOffset) {
02192     FieldOffsets.push_back(Context.toBits(FieldOffset));
02193   }
02194   /// \brief Places a bitfield at a bit offset.
02195   void placeFieldAtBitOffset(uint64_t FieldOffset) {
02196     FieldOffsets.push_back(FieldOffset);
02197   }
02198   /// \brief Compute the set of virtual bases for which vtordisps are required.
02199   void computeVtorDispSet(
02200       llvm::SmallPtrSetImpl<const CXXRecordDecl *> &HasVtorDispSet,
02201       const CXXRecordDecl *RD) const;
02202   const ASTContext &Context;
02203   /// \brief The size of the record being laid out.
02204   CharUnits Size;
02205   /// \brief The non-virtual size of the record layout.
02206   CharUnits NonVirtualSize;
02207   /// \brief The data size of the record layout.
02208   CharUnits DataSize;
02209   /// \brief The current alignment of the record layout.
02210   CharUnits Alignment;
02211   /// \brief The maximum allowed field alignment. This is set by #pragma pack.
02212   CharUnits MaxFieldAlignment;
02213   /// \brief The alignment that this record must obey.  This is imposed by
02214   /// __declspec(align()) on the record itself or one of its fields or bases.
02215   CharUnits RequiredAlignment;
02216   /// \brief The size of the allocation of the currently active bitfield.
02217   /// This value isn't meaningful unless LastFieldIsNonZeroWidthBitfield
02218   /// is true.
02219   CharUnits CurrentBitfieldSize;
02220   /// \brief Offset to the virtual base table pointer (if one exists).
02221   CharUnits VBPtrOffset;
02222   /// \brief Minimum record size possible.
02223   CharUnits MinEmptyStructSize;
02224   /// \brief The size and alignment info of a pointer.
02225   ElementInfo PointerInfo;
02226   /// \brief The primary base class (if one exists).
02227   const CXXRecordDecl *PrimaryBase;
02228   /// \brief The class we share our vb-pointer with.
02229   const CXXRecordDecl *SharedVBPtrBase;
02230   /// \brief The collection of field offsets.
02231   SmallVector<uint64_t, 16> FieldOffsets;
02232   /// \brief Base classes and their offsets in the record.
02233   BaseOffsetsMapTy Bases;
02234   /// \brief virtual base classes and their offsets in the record.
02235   ASTRecordLayout::VBaseOffsetsMapTy VBases;
02236   /// \brief The number of remaining bits in our last bitfield allocation.
02237   /// This value isn't meaningful unless LastFieldIsNonZeroWidthBitfield is
02238   /// true.
02239   unsigned RemainingBitsInField;
02240   bool IsUnion : 1;
02241   /// \brief True if the last field laid out was a bitfield and was not 0
02242   /// width.
02243   bool LastFieldIsNonZeroWidthBitfield : 1;
02244   /// \brief True if the class has its own vftable pointer.
02245   bool HasOwnVFPtr : 1;
02246   /// \brief True if the class has a vbtable pointer.
02247   bool HasVBPtr : 1;
02248   /// \brief True if the last sub-object within the type is zero sized or the
02249   /// object itself is zero sized.  This *does not* count members that are not
02250   /// records.  Only used for MS-ABI.
02251   bool EndsWithZeroSizedObject : 1;
02252   /// \brief True if this class is zero sized or first base is zero sized or
02253   /// has this property.  Only used for MS-ABI.
02254   bool LeadsWithZeroSizedBase : 1;
02255 };
02256 } // namespace
02257 
02258 MicrosoftRecordLayoutBuilder::ElementInfo
02259 MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(
02260     const ASTRecordLayout &Layout) {
02261   ElementInfo Info;
02262   Info.Alignment = Layout.getAlignment();
02263   // Respect pragma pack.
02264   if (!MaxFieldAlignment.isZero())
02265     Info.Alignment = std::min(Info.Alignment, MaxFieldAlignment);
02266   // Track zero-sized subobjects here where it's already available.
02267   EndsWithZeroSizedObject = Layout.hasZeroSizedSubObject();
02268   // Respect required alignment; this is necessary because we may have adjusted
02269   // the alignment in the case of pragma pack.  Note that the required alignment
02270   // doesn't actually apply to the struct alignment at this point.
02271   Alignment = std::max(Alignment, Info.Alignment);
02272   RequiredAlignment = std::max(RequiredAlignment, Layout.getRequiredAlignment());
02273   Info.Alignment = std::max(Info.Alignment, Layout.getRequiredAlignment());
02274   Info.Size = Layout.getNonVirtualSize();
02275   return Info;
02276 }
02277 
02278 MicrosoftRecordLayoutBuilder::ElementInfo
02279 MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(
02280     const FieldDecl *FD) {
02281   // Get the field type's natural size and alignment, ignoring any
02282   // alignment attributes.
02283   ElementInfo Info;
02284   std::tie(Info.Size, Info.Alignment) =
02285       Context.getTypeInfoInChars(FD->getType()->getUnqualifiedDesugaredType());
02286   // Respect align attributes on the field.
02287   CharUnits FieldRequiredAlignment =
02288       Context.toCharUnitsFromBits(FD->getMaxAlignment());
02289   // Respect align attributes on the type.
02290   if (Context.isAlignmentRequired(FD->getType()))
02291     FieldRequiredAlignment = std::max(
02292         Context.getTypeAlignInChars(FD->getType()), FieldRequiredAlignment);
02293   // Respect attributes applied to subobjects of the field.
02294   if (FD->isBitField())
02295     // For some reason __declspec align impacts alignment rather than required
02296     // alignment when it is applied to bitfields.
02297     Info.Alignment = std::max(Info.Alignment, FieldRequiredAlignment);
02298   else {
02299     if (auto RT =
02300             FD->getType()->getBaseElementTypeUnsafe()->getAs<RecordType>()) {
02301       auto const &Layout = Context.getASTRecordLayout(RT->getDecl());
02302       EndsWithZeroSizedObject = Layout.hasZeroSizedSubObject();
02303       FieldRequiredAlignment = std::max(FieldRequiredAlignment,
02304                                         Layout.getRequiredAlignment());
02305     }
02306     // Capture required alignment as a side-effect.
02307     RequiredAlignment = std::max(RequiredAlignment, FieldRequiredAlignment);
02308   }
02309   // Respect pragma pack, attribute pack and declspec align
02310   if (!MaxFieldAlignment.isZero())
02311     Info.Alignment = std::min(Info.Alignment, MaxFieldAlignment);
02312   if (FD->hasAttr<PackedAttr>())
02313     Info.Alignment = CharUnits::One();
02314   Info.Alignment = std::max(Info.Alignment, FieldRequiredAlignment);
02315   return Info;
02316 }
02317 
02318 void MicrosoftRecordLayoutBuilder::layout(const RecordDecl *RD) {
02319   // For C record layout, zero-sized records always have size 4.
02320   MinEmptyStructSize = CharUnits::fromQuantity(4);
02321   initializeLayout(RD);
02322   layoutFields(RD);
02323   DataSize = Size = Size.RoundUpToAlignment(Alignment);
02324   RequiredAlignment = std::max(
02325       RequiredAlignment, Context.toCharUnitsFromBits(RD->getMaxAlignment()));
02326   finalizeLayout(RD);
02327 }
02328 
02329 void MicrosoftRecordLayoutBuilder::cxxLayout(const CXXRecordDecl *RD) {
02330   // The C++ standard says that empty structs have size 1.
02331   MinEmptyStructSize = CharUnits::One();
02332   initializeLayout(RD);
02333   initializeCXXLayout(RD);
02334   layoutNonVirtualBases(RD);
02335   layoutFields(RD);
02336   injectVBPtr(RD);
02337   injectVFPtr(RD);
02338   if (HasOwnVFPtr || (HasVBPtr && !SharedVBPtrBase))
02339     Alignment = std::max(Alignment, PointerInfo.Alignment);
02340   auto RoundingAlignment = Alignment;
02341   if (!MaxFieldAlignment.isZero())
02342     RoundingAlignment = std::min(RoundingAlignment, MaxFieldAlignment);
02343   NonVirtualSize = Size = Size.RoundUpToAlignment(RoundingAlignment);
02344   RequiredAlignment = std::max(
02345       RequiredAlignment, Context.toCharUnitsFromBits(RD->getMaxAlignment()));
02346   layoutVirtualBases(RD);
02347   finalizeLayout(RD);
02348 }
02349 
02350 void MicrosoftRecordLayoutBuilder::initializeLayout(const RecordDecl *RD) {
02351   IsUnion = RD->isUnion();
02352   Size = CharUnits::Zero();
02353   Alignment = CharUnits::One();
02354   // In 64-bit mode we always perform an alignment step after laying out vbases.
02355   // In 32-bit mode we do not.  The check for whether this alignment step is
02356   // needed examines the RequiredAlignment field and aligns only if it isn't 0.
02357   RequiredAlignment = Context.getTargetInfo().getPointerWidth(0) == 64 ?
02358                       CharUnits::One() : CharUnits::Zero();
02359   // Compute the maximum field alignment.
02360   MaxFieldAlignment = CharUnits::Zero();
02361   // Honor the default struct packing maximum alignment flag.
02362   if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct)
02363       MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
02364   // Honor the packing attribute.  The MS-ABI ignores pragma pack if it is larger
02365   // than the pointer size.
02366   if (const MaxFieldAlignmentAttr *MFAA = RD->getAttr<MaxFieldAlignmentAttr>()){
02367     unsigned PackedAlignment = MFAA->getAlignment();
02368     if (PackedAlignment <= Context.getTargetInfo().getPointerWidth(0))
02369       MaxFieldAlignment = Context.toCharUnitsFromBits(PackedAlignment);
02370   }
02371   // Packed attribute forces max field alignment to be 1.
02372   if (RD->hasAttr<PackedAttr>())
02373     MaxFieldAlignment = CharUnits::One();
02374 }
02375 
02376 void
02377 MicrosoftRecordLayoutBuilder::initializeCXXLayout(const CXXRecordDecl *RD) {
02378   EndsWithZeroSizedObject = false;
02379   LeadsWithZeroSizedBase = false;
02380   HasOwnVFPtr = false;
02381   HasVBPtr = false;
02382   PrimaryBase = nullptr;
02383   SharedVBPtrBase = nullptr;
02384   // Calculate pointer size and alignment.  These are used for vfptr and vbptr
02385   // injection.
02386   PointerInfo.Size =
02387       Context.toCharUnitsFromBits(Context.getTargetInfo().getPointerWidth(0));
02388   PointerInfo.Alignment = PointerInfo.Size;
02389   // Respect pragma pack.
02390   if (!MaxFieldAlignment.isZero())
02391     PointerInfo.Alignment = std::min(PointerInfo.Alignment, MaxFieldAlignment);
02392 }
02393 
02394 void
02395 MicrosoftRecordLayoutBuilder::layoutNonVirtualBases(const CXXRecordDecl *RD) {
02396   // The MS-ABI lays out all bases that contain leading vfptrs before it lays
02397   // out any bases that do not contain vfptrs.  We implement this as two passes
02398   // over the bases.  This approach guarantees that the primary base is laid out
02399   // first.  We use these passes to calculate some additional aggregated
02400   // information about the bases, such as required alignment and the presence of
02401   // zero sized members.
02402   const ASTRecordLayout *PreviousBaseLayout = nullptr;
02403   // Iterate through the bases and lay out the non-virtual ones.
02404   for (const CXXBaseSpecifier &Base : RD->bases()) {
02405     const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
02406     const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
02407     // Mark and skip virtual bases.
02408     if (Base.isVirtual()) {
02409       HasVBPtr = true;
02410       continue;
02411     }
02412     // Check for a base to share a VBPtr with.
02413     if (!SharedVBPtrBase && BaseLayout.hasVBPtr()) {
02414       SharedVBPtrBase = BaseDecl;
02415       HasVBPtr = true;
02416     }
02417     // Only lay out bases with extendable VFPtrs on the first pass.
02418     if (!BaseLayout.hasExtendableVFPtr())
02419       continue;
02420     // If we don't have a primary base, this one qualifies.
02421     if (!PrimaryBase) {
02422       PrimaryBase = BaseDecl;
02423       LeadsWithZeroSizedBase = BaseLayout.leadsWithZeroSizedBase();
02424     }
02425     // Lay out the base.
02426     layoutNonVirtualBase(BaseDecl, BaseLayout, PreviousBaseLayout);
02427   }
02428   // Figure out if we need a fresh VFPtr for this class.
02429   if (!PrimaryBase && RD->isDynamicClass())
02430     for (CXXRecordDecl::method_iterator i = RD->method_begin(),
02431                                         e = RD->method_end();
02432          !HasOwnVFPtr && i != e; ++i)
02433       HasOwnVFPtr = i->isVirtual() && i->size_overridden_methods() == 0;
02434   // If we don't have a primary base then we have a leading object that could
02435   // itself lead with a zero-sized object, something we track.
02436   bool CheckLeadingLayout = !PrimaryBase;
02437   // Iterate through the bases and lay out the non-virtual ones.
02438   for (const CXXBaseSpecifier &Base : RD->bases()) {
02439     if (Base.isVirtual())
02440       continue;
02441     const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
02442     const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
02443     // Only lay out bases without extendable VFPtrs on the second pass.
02444     if (BaseLayout.hasExtendableVFPtr()) {
02445       VBPtrOffset = Bases[BaseDecl] + BaseLayout.getNonVirtualSize();
02446       continue;
02447     }
02448     // If this is the first layout, check to see if it leads with a zero sized
02449     // object.  If it does, so do we.
02450     if (CheckLeadingLayout) {
02451       CheckLeadingLayout = false;
02452       LeadsWithZeroSizedBase = BaseLayout.leadsWithZeroSizedBase();
02453     }
02454     // Lay out the base.
02455     layoutNonVirtualBase(BaseDecl, BaseLayout, PreviousBaseLayout);
02456     VBPtrOffset = Bases[BaseDecl] + BaseLayout.getNonVirtualSize();
02457   }
02458   // Set our VBPtrOffset if we know it at this point.
02459   if (!HasVBPtr)
02460     VBPtrOffset = CharUnits::fromQuantity(-1);
02461   else if (SharedVBPtrBase) {
02462     const ASTRecordLayout &Layout = Context.getASTRecordLayout(SharedVBPtrBase);
02463     VBPtrOffset = Bases[SharedVBPtrBase] + Layout.getVBPtrOffset();
02464   }
02465 }
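// For example (a sketch of the two-pass scheme above, 32-bit MS ABI):
//
//   struct A { int a; };
//   struct B { virtual void f(); };
//   struct C : A, B {};
//   // B contains a vfptr, so it is laid out in the first pass at offset 0 and
//   // becomes the primary base; A follows in the second pass at offset 4,
//   // even though it is declared first.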
02466 
02467 void MicrosoftRecordLayoutBuilder::layoutNonVirtualBase(
02468     const CXXRecordDecl *BaseDecl,
02469     const ASTRecordLayout &BaseLayout,
02470     const ASTRecordLayout *&PreviousBaseLayout) {
02471   // Insert padding between two bases if the first one is zero sized or
02472   // contains a zero sized subobject and the second one is zero sized or leads
02473   // with a zero sized base.
02474   if (PreviousBaseLayout && PreviousBaseLayout->hasZeroSizedSubObject() &&
02475       BaseLayout.leadsWithZeroSizedBase())
02476     Size++;
02477   ElementInfo Info = getAdjustedElementInfo(BaseLayout);
02478   CharUnits BaseOffset = Size.RoundUpToAlignment(Info.Alignment);
02479   Bases.insert(std::make_pair(BaseDecl, BaseOffset));
02480   Size = BaseOffset + BaseLayout.getNonVirtualSize();
02481   PreviousBaseLayout = &BaseLayout;
02482 }
02483 
02484 void MicrosoftRecordLayoutBuilder::layoutFields(const RecordDecl *RD) {
02485   LastFieldIsNonZeroWidthBitfield = false;
02486   for (const FieldDecl *Field : RD->fields())
02487     layoutField(Field);
02488 }
02489 
02490 void MicrosoftRecordLayoutBuilder::layoutField(const FieldDecl *FD) {
02491   if (FD->isBitField()) {
02492     layoutBitField(FD);
02493     return;
02494   }
02495   LastFieldIsNonZeroWidthBitfield = false;
02496   ElementInfo Info = getAdjustedElementInfo(FD);
02497   Alignment = std::max(Alignment, Info.Alignment);
02498   if (IsUnion) {
02499     placeFieldAtOffset(CharUnits::Zero());
02500     Size = std::max(Size, Info.Size);
02501   } else {
02502     CharUnits FieldOffset = Size.RoundUpToAlignment(Info.Alignment);
02503     placeFieldAtOffset(FieldOffset);
02504     Size = FieldOffset + Info.Size;
02505   }
02506 }
02507 
02508 void MicrosoftRecordLayoutBuilder::layoutBitField(const FieldDecl *FD) {
02509   unsigned Width = FD->getBitWidthValue(Context);
02510   if (Width == 0) {
02511     layoutZeroWidthBitField(FD);
02512     return;
02513   }
02514   ElementInfo Info = getAdjustedElementInfo(FD);
02515   // Clamp the bitfield to a containable size for the sake of being able
02516   // to lay it out.  Sema will issue an error.
02517   if (Width > Context.toBits(Info.Size))
02518     Width = Context.toBits(Info.Size);
02519   // Check to see if this bitfield fits into an existing allocation.  Note:
02520   // MSVC refuses to pack bitfields of formal types with different sizes
02521   // into the same allocation.
02522   if (!IsUnion && LastFieldIsNonZeroWidthBitfield &&
02523       CurrentBitfieldSize == Info.Size && Width <= RemainingBitsInField) {
02524     placeFieldAtBitOffset(Context.toBits(Size) - RemainingBitsInField);
02525     RemainingBitsInField -= Width;
02526     return;
02527   }
02528   LastFieldIsNonZeroWidthBitfield = true;
02529   CurrentBitfieldSize = Info.Size;
02530   if (IsUnion) {
02531     placeFieldAtOffset(CharUnits::Zero());
02532     Size = std::max(Size, Info.Size);
02533     // TODO: Add a Sema warning that MS ignores bitfield alignment in unions.
02534   } else {
02535     // Allocate a new block of memory and place the bitfield in it.
02536     CharUnits FieldOffset = Size.RoundUpToAlignment(Info.Alignment);
02537     placeFieldAtOffset(FieldOffset);
02538     Size = FieldOffset + Info.Size;
02539     Alignment = std::max(Alignment, Info.Alignment);
02540     RemainingBitsInField = Context.toBits(Info.Size) - Width;
02541   }
02542 }
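// Illustration of the MSVC allocation rule checked above (hypothetical
// declarations, not from the original source):
//
//   struct B1 { int  a : 3; int   b : 5; };  // same storage size: a and b share
//                                            // one int-sized allocation
//   struct B2 { char a : 3; short b : 5; };  // different storage sizes: b starts
//                                            // a fresh short-sized allocation
//
// A bitfield only joins the current run when the previous field was a non-zero
// width bitfield, the storage sizes match, and enough bits remain in the unit.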
02543 
02544 void
02545 MicrosoftRecordLayoutBuilder::layoutZeroWidthBitField(const FieldDecl *FD) {
02546   // Zero-width bitfields are ignored unless they follow a non-zero-width
02547   // bitfield.
02548   if (!LastFieldIsNonZeroWidthBitfield) {
02549     placeFieldAtOffset(IsUnion ? CharUnits::Zero() : Size);
02550     // TODO: Add a Sema warning that MS ignores alignment for zero-width
02551     // bitfields that follow zero-width bitfields or non-bitfield members.
02552     return;
02553   }
02554   LastFieldIsNonZeroWidthBitfield = false;
02555   ElementInfo Info = getAdjustedElementInfo(FD);
02556   if (IsUnion) {
02557     placeFieldAtOffset(CharUnits::Zero());
02558     Size = std::max(Size, Info.Size);
02559     // TODO: Add a Sema warning that MS ignores bitfield alignment in unions.
02560   } else {
02561     // Round up the current record size to the field's alignment boundary.
02562     CharUnits FieldOffset = Size.RoundUpToAlignment(Info.Alignment);
02563     placeFieldAtOffset(FieldOffset);
02564     Size = FieldOffset;
02565     Alignment = std::max(Alignment, Info.Alignment);
02566   }
02567 }
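// Illustration (hypothetical): a zero-width bitfield only affects layout when it
// interrupts a run of non-zero-width bitfields.
//
//   struct Z { int a : 3; int : 0; char b; };
//
// The ":0" follows the non-zero-width bitfield 'a', so the record size is rounded
// up to the alignment of int before 'b' is laid out; a ":0" that follows a
// non-bitfield member (or another zero-width bitfield) is simply recorded at the
// current offset and changes nothing.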
02568 
02569 void MicrosoftRecordLayoutBuilder::injectVBPtr(const CXXRecordDecl *RD) {
02570   if (!HasVBPtr || SharedVBPtrBase)
02571     return;
02572   // Inject the VBPointer at the injection site.
02573   CharUnits InjectionSite = VBPtrOffset;
02574   // But before we do, make sure it's properly aligned.
02575   VBPtrOffset = VBPtrOffset.RoundUpToAlignment(PointerInfo.Alignment);
02576   // Determine where the first field should be laid out after the vbptr.
02577   CharUnits FieldStart = VBPtrOffset + PointerInfo.Size;
02578   // Make sure that the amount we push the fields back by is a multiple of the
02579   // alignment.
02580   CharUnits Offset = (FieldStart - InjectionSite).RoundUpToAlignment(
02581       std::max(RequiredAlignment, Alignment));
02582   // Increase the size of the object and push back all fields by the offset
02583   // amount.
02584   Size += Offset;
02585   for (uint64_t &FieldOffset : FieldOffsets)
02586     FieldOffset += Context.toBits(Offset);
02587   for (BaseOffsetsMapTy::value_type &Base : Bases)
02588     if (Base.second >= InjectionSite)
02589       Base.second += Offset;
02590 }
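// Illustration (hypothetical hierarchy, not from the original source):
//
//   struct A { int a; };
//   struct B : virtual A { int b; };
//
// B has virtual bases but no base that already provides a vbptr, so one is
// injected here: the record grows by the vbptr size rounded up to the larger of
// the required and current alignment, every field offset recorded so far is
// shifted by that amount, and any non-virtual base placed at or after the
// injection site is shifted as well.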
02591 
02592 void MicrosoftRecordLayoutBuilder::injectVFPtr(const CXXRecordDecl *RD) {
02593   if (!HasOwnVFPtr)
02594     return;
02595   // Make sure that the amount we push the struct back by is a multiple of the
02596   // alignment.
02597   CharUnits Offset = PointerInfo.Size.RoundUpToAlignment(
02598       std::max(RequiredAlignment, Alignment));
02599   // Increase the size of the object and push back all fields, the vbptr and all
02600   // bases by the offset amount.
02601   Size += Offset;
02602   for (uint64_t &FieldOffset : FieldOffsets)
02603     FieldOffset += Context.toBits(Offset);
02604   if (HasVBPtr)
02605     VBPtrOffset += Offset;
02606   for (BaseOffsetsMapTy::value_type &Base : Bases)
02607     Base.second += Offset;
02608 }
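// Note (illustration): a class that introduces its own vfptr, e.g. the
// hypothetical
//
//   struct V { virtual void f(); int v; };
//
// keeps the vfptr at offset zero, so unlike injectVBPtr there is no injection
// site to compute; everything already placed (fields, the vbptr if present, and
// all non-virtual bases) is pushed back by the pointer size rounded up to the
// alignment.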
02609 
02610 void MicrosoftRecordLayoutBuilder::layoutVirtualBases(const CXXRecordDecl *RD) {
02611   if (!HasVBPtr)
02612     return;
02613   // Vtordisps are always 4 bytes (even in 64-bit mode)
02614   CharUnits VtorDispSize = CharUnits::fromQuantity(4);
02615   CharUnits VtorDispAlignment = VtorDispSize;
02616   // vtordisps respect pragma pack.
02617   if (!MaxFieldAlignment.isZero())
02618     VtorDispAlignment = std::min(VtorDispAlignment, MaxFieldAlignment);
02619   // The alignment of the vtordisp is at least the required alignment of the
02620   // entire record.  This requirement may be present to support vtordisp
02621   // injection.
02622   for (const CXXBaseSpecifier &VBase : RD->vbases()) {
02623     const CXXRecordDecl *BaseDecl = VBase.getType()->getAsCXXRecordDecl();
02624     const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
02625     RequiredAlignment =
02626         std::max(RequiredAlignment, BaseLayout.getRequiredAlignment());
02627   }
02628   VtorDispAlignment = std::max(VtorDispAlignment, RequiredAlignment);
02629   // Compute the vtordisp set.
02630   llvm::SmallPtrSet<const CXXRecordDecl *, 2> HasVtorDispSet;
02631   computeVtorDispSet(HasVtorDispSet, RD);
02632   // Iterate through the virtual bases and lay them out.
02633   const ASTRecordLayout *PreviousBaseLayout = nullptr;
02634   for (const CXXBaseSpecifier &VBase : RD->vbases()) {
02635     const CXXRecordDecl *BaseDecl = VBase.getType()->getAsCXXRecordDecl();
02636     const ASTRecordLayout &BaseLayout = Context.getASTRecordLayout(BaseDecl);
02637     bool HasVtordisp = HasVtorDispSet.count(BaseDecl) > 0;
02638     // Insert padding between two bases if the previous one is zero sized or
02639     // contains a zero-sized subobject and the next one is zero sized or leads
02640     // with a zero-sized base.  The padding between virtual bases is 4 bytes
02641     // (in both 32- and 64-bit modes) and always involves rounding up to the
02642     // required alignment; we don't know why.
02643     if ((PreviousBaseLayout && PreviousBaseLayout->hasZeroSizedSubObject() &&
02644         BaseLayout.leadsWithZeroSizedBase()) || HasVtordisp) {
02645       Size = Size.RoundUpToAlignment(VtorDispAlignment) + VtorDispSize;
02646       Alignment = std::max(VtorDispAlignment, Alignment);
02647     }
02648     // Insert the virtual base.
02649     ElementInfo Info = getAdjustedElementInfo(BaseLayout);
02650     CharUnits BaseOffset = Size.RoundUpToAlignment(Info.Alignment);
02651     VBases.insert(std::make_pair(BaseDecl,
02652         ASTRecordLayout::VBaseInfo(BaseOffset, HasVtordisp)));
02653     Size = BaseOffset + BaseLayout.getNonVirtualSize();
02654     PreviousBaseLayout = &BaseLayout;
02655   }
02656 }
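// Illustration (hypothetical hierarchy): a virtual base that needs a vtordisp
// gets the 4-byte slot immediately before its subobject.
//
//   struct A { virtual void f(); };
//   struct B : virtual A { B(); void f() override; };
//
// Here A ends up in the vtordisp set (see computeVtorDispSet below), so the size
// is rounded up to the vtordisp alignment and grown by 4 bytes before A's
// subobject is placed, and A's VBaseInfo records that it has a vtordisp.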
02657 
02658 void MicrosoftRecordLayoutBuilder::finalizeLayout(const RecordDecl *RD) {
02659   // Respect required alignment.  Note that in 32-bit mode the required
02660   // alignment may be zero, in which case the size is not updated.
02661   DataSize = Size;
02662   if (!RequiredAlignment.isZero()) {
02663     Alignment = std::max(Alignment, RequiredAlignment);
02664     auto RoundingAlignment = Alignment;
02665     if (!MaxFieldAlignment.isZero())
02666       RoundingAlignment = std::min(RoundingAlignment, MaxFieldAlignment);
02667     RoundingAlignment = std::max(RoundingAlignment, RequiredAlignment);
02668     Size = Size.RoundUpToAlignment(RoundingAlignment);
02669   }
02670   if (Size.isZero()) {
02671     EndsWithZeroSizedObject = true;
02672     LeadsWithZeroSizedBase = true;
02673     // Zero-sized structures have size equal to their alignment if a
02674     // __declspec(align) came into play.
02675     if (RequiredAlignment >= MinEmptyStructSize)
02676       Size = Alignment;
02677     else
02678       Size = MinEmptyStructSize;
02679   }
02680 }
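// Illustration (hypothetical, assuming the Microsoft ABI): an otherwise empty
// record still receives a non-zero size here.
//
//   struct E {};                       // Size was zero; becomes MinEmptyStructSize
//   struct __declspec(align(8)) F {};  // required alignment >= MinEmptyStructSize,
//                                      // so Size becomes the (8-byte) alignment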
02681 
02682 // Recursively walks the non-virtual bases of a class and determines if any of
02683 // them are in the set of bases with overridden methods.
02684 static bool
02685 RequiresVtordisp(const llvm::SmallPtrSetImpl<const CXXRecordDecl *> &
02686                      BasesWithOverriddenMethods,
02687                  const CXXRecordDecl *RD) {
02688   if (BasesWithOverriddenMethods.count(RD))
02689     return true;
02690   // If any of a virtual base's non-virtual bases (recursively) requires a
02691   // vtordisp, then so does this virtual base.
02692   for (const CXXBaseSpecifier &Base : RD->bases())
02693     if (!Base.isVirtual() &&
02694         RequiresVtordisp(BasesWithOverriddenMethods,
02695                          Base.getType()->getAsCXXRecordDecl()))
02696       return true;
02697   return false;
02698 }
02699 
02700 void MicrosoftRecordLayoutBuilder::computeVtorDispSet(
02701     llvm::SmallPtrSetImpl<const CXXRecordDecl *> &HasVtordispSet,
02702     const CXXRecordDecl *RD) const {
02703   // /vd2 or #pragma vtordisp(2): Always use vtordisps for virtual bases with
02704   // vftables.
02705   if (RD->getMSVtorDispMode() == MSVtorDispAttr::ForVFTable) {
02706     for (const CXXBaseSpecifier &Base : RD->vbases()) {
02707       const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
02708       const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
02709       if (Layout.hasExtendableVFPtr())
02710         HasVtordispSet.insert(BaseDecl);
02711     }
02712     return;
02713   }
02714 
02715   // If any of our bases need a vtordisp for this type, so do we.  Check our
02716   // direct bases for vtordisp requirements.
02717   for (const CXXBaseSpecifier &Base : RD->bases()) {
02718     const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
02719     const ASTRecordLayout &Layout = Context.getASTRecordLayout(BaseDecl);
02720     for (const auto &bi : Layout.getVBaseOffsetsMap())
02721       if (bi.second.hasVtorDisp())
02722         HasVtordispSet.insert(bi.first);
02723   }
02724   // We don't introduce any additional vtordisps if either:
02725   // * No user-declared constructor or destructor is present.
02726   // * #pragma vtordisp(0) or the /vd0 flag is in use.
02727   if ((!RD->hasUserDeclaredConstructor() && !RD->hasUserDeclaredDestructor()) ||
02728       RD->getMSVtorDispMode() == MSVtorDispAttr::Never)
02729     return;
02730   // /vd1 or #pragma vtordisp(1): Try to guess based on whether we think it's
02731   // possible for a partially constructed object with virtual base overrides to
02732   // escape a non-trivial constructor.
02733   assert(RD->getMSVtorDispMode() == MSVtorDispAttr::ForVBaseOverride);
02734   // Compute a set of base classes which define methods we override.  A virtual
02735   // base in this set will require a vtordisp.  A virtual base that transitively
02736   // contains one of these bases as a non-virtual base will also require a
02737   // vtordisp.
02738   llvm::SmallPtrSet<const CXXMethodDecl *, 8> Work;
02739   llvm::SmallPtrSet<const CXXRecordDecl *, 2> BasesWithOverriddenMethods;
02740   // Seed the working set with our non-destructor, non-pure virtual methods.
02741   for (const CXXMethodDecl *MD : RD->methods())
02742     if (MD->isVirtual() && !isa<CXXDestructorDecl>(MD) && !MD->isPure())
02743       Work.insert(MD);
02744   while (!Work.empty()) {
02745     const CXXMethodDecl *MD = *Work.begin();
02746     CXXMethodDecl::method_iterator i = MD->begin_overridden_methods(),
02747                                    e = MD->end_overridden_methods();
02748     // If a virtual method overrides nothing, it lives in its parent's vtable.
02749     if (i == e)
02750       BasesWithOverriddenMethods.insert(MD->getParent());
02751     else
02752       Work.insert(i, e);
02753     // We've finished processing this element, remove it from the working set.
02754     Work.erase(MD);
02755   }
02756   // For each of our virtual bases, check if it is in the set of overridden
02757   // bases or if it transitively contains a non-virtual base that is.
02758   for (const CXXBaseSpecifier &Base : RD->vbases()) {
02759     const CXXRecordDecl *BaseDecl = Base.getType()->getAsCXXRecordDecl();
02760     if (!HasVtordispSet.count(BaseDecl) &&
02761         RequiresVtordisp(BasesWithOverriddenMethods, BaseDecl))
02762       HasVtordispSet.insert(BaseDecl);
02763   }
02764 }
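// Summary of the three modes handled above (hedged restatement, not new
// behavior): with #pragma vtordisp(2) or /vd2 every virtual base that has an
// extendable vfptr gets a vtordisp; with vtordisp(0)//vd0, or when the class has
// no user-declared constructor or destructor, no vtordisps are added beyond those
// inherited from direct bases; with the default vtordisp(1)//vd1 a virtual base
// gets a vtordisp when it is, or transitively contains as a non-virtual base, the
// class that introduces a virtual method this class overrides.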
02765 
02766 /// \brief Get or compute information about the layout of the specified record
02767 /// (struct/union/class), which indicates its size and field position
02768 /// information.
02769 const ASTRecordLayout *
02770 ASTContext::BuildMicrosoftASTRecordLayout(const RecordDecl *D) const {
02771   MicrosoftRecordLayoutBuilder Builder(*this);
02772   if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
02773     Builder.cxxLayout(RD);
02774     return new (*this) ASTRecordLayout(
02775         *this, Builder.Size, Builder.Alignment, Builder.RequiredAlignment,
02776         Builder.HasOwnVFPtr,
02777         Builder.HasOwnVFPtr || Builder.PrimaryBase,
02778         Builder.VBPtrOffset, Builder.NonVirtualSize, Builder.FieldOffsets.data(),
02779         Builder.FieldOffsets.size(), Builder.NonVirtualSize,
02780         Builder.Alignment, CharUnits::Zero(), Builder.PrimaryBase,
02781         false, Builder.SharedVBPtrBase,
02782         Builder.EndsWithZeroSizedObject, Builder.LeadsWithZeroSizedBase,
02783         Builder.Bases, Builder.VBases);
02784   } else {
02785     Builder.layout(D);
02786     return new (*this) ASTRecordLayout(
02787         *this, Builder.Size, Builder.Alignment, Builder.RequiredAlignment,
02788         Builder.Size, Builder.FieldOffsets.data(), Builder.FieldOffsets.size());
02789   }
02790 }
02791 
02792 /// getASTRecordLayout - Get or compute information about the layout of the
02793 /// specified record (struct/union/class), which indicates its size and field
02794 /// position information.
02795 const ASTRecordLayout &
02796 ASTContext::getASTRecordLayout(const RecordDecl *D) const {
02797   // These asserts test different things.  A record has a definition
02798   // as soon as we begin to parse the definition.  That definition is
02799   // not a complete definition (which is what isCompleteDefinition() tests)
02800   // until we *finish* parsing the definition.
02801 
02802   if (D->hasExternalLexicalStorage() && !D->getDefinition())
02803     getExternalSource()->CompleteType(const_cast<RecordDecl*>(D));
02804     
02805   D = D->getDefinition();
02806   assert(D && "Cannot get layout of forward declarations!");
02807   assert(!D->isInvalidDecl() && "Cannot get layout of invalid decl!");
02808   assert(D->isCompleteDefinition() && "Cannot layout type before complete!");
02809 
02810   // Look up this layout, if already laid out, return what we have.
02811   // Note that we can't save a reference to the entry because this function
02812   // is recursive.
02813   const ASTRecordLayout *Entry = ASTRecordLayouts[D];
02814   if (Entry) return *Entry;
02815 
02816   const ASTRecordLayout *NewEntry = nullptr;
02817 
02818   if (isMsLayout(D) && !D->getASTContext().getExternalSource()) {
02819     NewEntry = BuildMicrosoftASTRecordLayout(D);
02820   } else if (const CXXRecordDecl *RD = dyn_cast<CXXRecordDecl>(D)) {
02821     EmptySubobjectMap EmptySubobjects(*this, RD);
02822     RecordLayoutBuilder Builder(*this, &EmptySubobjects);
02823     Builder.Layout(RD);
02824 
02825     // In certain situations, we are allowed to lay out objects in the
02826     // tail-padding of base classes.  This is ABI-dependent.
02827     // FIXME: this should be stored in the record layout.
02828     bool skipTailPadding =
02829       mustSkipTailPadding(getTargetInfo().getCXXABI(), cast<CXXRecordDecl>(D));
02830 
02831     // FIXME: This should be done in FinalizeLayout.
02832     CharUnits DataSize =
02833       skipTailPadding ? Builder.getSize() : Builder.getDataSize();
02834     CharUnits NonVirtualSize = 
02835       skipTailPadding ? DataSize : Builder.NonVirtualSize;
02836     NewEntry =
02837       new (*this) ASTRecordLayout(*this, Builder.getSize(), 
02838                                   Builder.Alignment,
02839                                   /*RequiredAlignment: used by MS ABI*/
02840                                   Builder.Alignment,
02841                                   Builder.HasOwnVFPtr,
02842                                   RD->isDynamicClass(),
02843                                   CharUnits::fromQuantity(-1),
02844                                   DataSize, 
02845                                   Builder.FieldOffsets.data(),
02846                                   Builder.FieldOffsets.size(),
02847                                   NonVirtualSize,
02848                                   Builder.NonVirtualAlignment,
02849                                   EmptySubobjects.SizeOfLargestEmptySubobject,
02850                                   Builder.PrimaryBase,
02851                                   Builder.PrimaryBaseIsVirtual,
02852                                   nullptr, false, false,
02853                                   Builder.Bases, Builder.VBases);
02854   } else {
02855     RecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/nullptr);
02856     Builder.Layout(D);
02857 
02858     NewEntry =
02859       new (*this) ASTRecordLayout(*this, Builder.getSize(), 
02860                                   Builder.Alignment,
02861                                   /*RequiredAlignment: used by MS ABI*/
02862                                   Builder.Alignment,
02863                                   Builder.getSize(),
02864                                   Builder.FieldOffsets.data(),
02865                                   Builder.FieldOffsets.size());
02866   }
02867 
02868   ASTRecordLayouts[D] = NewEntry;
02869 
02870   if (getLangOpts().DumpRecordLayouts) {
02871     llvm::outs() << "\n*** Dumping AST Record Layout\n";
02872     DumpRecordLayout(D, llvm::outs(), getLangOpts().DumpRecordLayoutsSimple);
02873   }
02874 
02875   return *NewEntry;
02876 }
02877 
02878 const CXXMethodDecl *ASTContext::getCurrentKeyFunction(const CXXRecordDecl *RD) {
02879   if (!getTargetInfo().getCXXABI().hasKeyFunctions())
02880     return nullptr;
02881 
02882   assert(RD->getDefinition() && "Cannot get key function for forward decl!");
02883   RD = cast<CXXRecordDecl>(RD->getDefinition());
02884 
02885   // Beware:
02886   //  1) computing the key function might trigger deserialization, which might
02887   //     invalidate iterators into KeyFunctions
02888   //  2) 'get' on the LazyDeclPtr might also trigger deserialization and
02889   //     invalidate the LazyDeclPtr within the map itself
02890   LazyDeclPtr Entry = KeyFunctions[RD];
02891   const Decl *Result =
02892       Entry ? Entry.get(getExternalSource()) : computeKeyFunction(*this, RD);
02893 
02894   // Store it back if it changed.
02895   if (Entry.isOffset() || Entry.isValid() != bool(Result))
02896     KeyFunctions[RD] = const_cast<Decl*>(Result);
02897 
02898   return cast_or_null<CXXMethodDecl>(Result);
02899 }
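// Note (illustration): the key function is, roughly, the first non-pure virtual
// member function that is not defined inline, e.g. in the hypothetical
//
//   struct S { virtual void f(); virtual void g() {} };
//
// S::f is the key function, and the vtable for S is emitted in the translation
// unit that provides S::f's out-of-line definition.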
02900 
02901 void ASTContext::setNonKeyFunction(const CXXMethodDecl *Method) {
02902   assert(Method == Method->getFirstDecl() &&
02903          "not working with method declaration from class definition");
02904 
02905   // Look up the cache entry.  Since we're working with the first
02906   // declaration, its parent must be the class definition, which is
02907   // the correct key for the KeyFunctions hash.
02908   llvm::DenseMap<const CXXRecordDecl*, LazyDeclPtr>::iterator
02909     I = KeyFunctions.find(Method->getParent());
02910 
02911   // If it's not cached, there's nothing to do.
02912   if (I == KeyFunctions.end()) return;
02913 
02914   // If it is cached, check whether it's the target method, and if so,
02915   // remove it from the cache. Note, the call to 'get' might invalidate
02916   // the iterator and the LazyDeclPtr object within the map.
02917   LazyDeclPtr Ptr = I->second;
02918   if (Ptr.get(getExternalSource()) == Method) {
02919     // FIXME: remember that we did this for module / chained PCH state?
02920     KeyFunctions.erase(Method->getParent());
02921   }
02922 }
02923 
02924 static uint64_t getFieldOffset(const ASTContext &C, const FieldDecl *FD) {
02925   const ASTRecordLayout &Layout = C.getASTRecordLayout(FD->getParent());
02926   return Layout.getFieldOffset(FD->getFieldIndex());
02927 }
02928 
02929 uint64_t ASTContext::getFieldOffset(const ValueDecl *VD) const {
02930   uint64_t OffsetInBits;
02931   if (const FieldDecl *FD = dyn_cast<FieldDecl>(VD)) {
02932     OffsetInBits = ::getFieldOffset(*this, FD);
02933   } else {
02934     const IndirectFieldDecl *IFD = cast<IndirectFieldDecl>(VD);
02935 
02936     OffsetInBits = 0;
02937     for (const NamedDecl *ND : IFD->chain())
02938       OffsetInBits += ::getFieldOffset(*this, cast<FieldDecl>(ND));
02939   }
02940 
02941   return OffsetInBits;
02942 }
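// Illustration (hypothetical): indirect fields arise from anonymous structs and
// unions.
//
//   struct S { struct { int a; int b; }; };
//
// Asking for the offset of 'b' takes the IndirectFieldDecl branch above: the
// chain holds the unnamed struct member of S followed by 'b', and the bit
// offsets of the links are summed.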
02943 
02944 /// getObjCLayout - Get or compute information about the layout of the
02945 /// given interface.
02946 ///
02947 /// \param Impl - If given, also include the layout of the interface's
02948 /// implementation. This may differ by including synthesized ivars.
02949 const ASTRecordLayout &
02950 ASTContext::getObjCLayout(const ObjCInterfaceDecl *D,
02951                           const ObjCImplementationDecl *Impl) const {
02952   // Retrieve the definition
02953   if (D->hasExternalLexicalStorage() && !D->getDefinition())
02954     getExternalSource()->CompleteType(const_cast<ObjCInterfaceDecl*>(D));
02955   D = D->getDefinition();
02956   assert(D && D->isThisDeclarationADefinition() && "Invalid interface decl!");
02957 
02958   // Look up this layout, if already laid out, return what we have.
02959   const ObjCContainerDecl *Key =
02960     Impl ? (const ObjCContainerDecl*) Impl : (const ObjCContainerDecl*) D;
02961   if (const ASTRecordLayout *Entry = ObjCLayouts[Key])
02962     return *Entry;
02963 
02964   // Add in synthesized ivar count if laying out an implementation.
02965   if (Impl) {
02966     unsigned SynthCount = CountNonClassIvars(D);
02967     // If there aren't any synthesized ivars then reuse the interface
02968     // entry. Note we can't cache this because we simply free all
02969     // entries later; however we shouldn't look up implementations
02970     // frequently.
02971     if (SynthCount == 0)
02972       return getObjCLayout(D, nullptr);
02973   }
02974 
02975   RecordLayoutBuilder Builder(*this, /*EmptySubobjects=*/nullptr);
02976   Builder.Layout(D);
02977 
02978   const ASTRecordLayout *NewEntry =
02979     new (*this) ASTRecordLayout(*this, Builder.getSize(), 
02980                                 Builder.Alignment,
02981                                 /*RequiredAlignment: used by MS ABI*/
02982                                 Builder.Alignment,
02983                                 Builder.getDataSize(),
02984                                 Builder.FieldOffsets.data(),
02985                                 Builder.FieldOffsets.size());
02986 
02987   ObjCLayouts[Key] = NewEntry;
02988 
02989   return *NewEntry;
02990 }
02991 
02992 static void PrintOffset(raw_ostream &OS,
02993                         CharUnits Offset, unsigned IndentLevel) {
02994   OS << llvm::format("%4" PRId64 " | ", (int64_t)Offset.getQuantity());
02995   OS.indent(IndentLevel * 2);
02996 }
02997 
02998 static void PrintIndentNoOffset(raw_ostream &OS, unsigned IndentLevel) {
02999   OS << "     | ";
03000   OS.indent(IndentLevel * 2);
03001 }
03002 
03003 static void DumpCXXRecordLayout(raw_ostream &OS,
03004                                 const CXXRecordDecl *RD, const ASTContext &C,
03005                                 CharUnits Offset,
03006                                 unsigned IndentLevel,
03007                                 const char* Description,
03008                                 bool IncludeVirtualBases) {
03009   const ASTRecordLayout &Layout = C.getASTRecordLayout(RD);
03010 
03011   PrintOffset(OS, Offset, IndentLevel);
03012   OS << C.getTypeDeclType(const_cast<CXXRecordDecl *>(RD)).getAsString();
03013   if (Description)
03014     OS << ' ' << Description;
03015   if (RD->isEmpty())
03016     OS << " (empty)";
03017   OS << '\n';
03018 
03019   IndentLevel++;
03020 
03021   const CXXRecordDecl *PrimaryBase = Layout.getPrimaryBase();
03022   bool HasOwnVFPtr = Layout.hasOwnVFPtr();
03023   bool HasOwnVBPtr = Layout.hasOwnVBPtr();
03024 
03025   // Vtable pointer.
03026   if (RD->isDynamicClass() && !PrimaryBase && !isMsLayout(RD)) {
03027     PrintOffset(OS, Offset, IndentLevel);
03028     OS << '(' << *RD << " vtable pointer)\n";
03029   } else if (HasOwnVFPtr) {
03030     PrintOffset(OS, Offset, IndentLevel);
03031     // vfptr (for Microsoft C++ ABI)
03032     OS << '(' << *RD << " vftable pointer)\n";
03033   }
03034 
03035   // Collect nvbases.
03036   SmallVector<const CXXRecordDecl *, 4> Bases;
03037   for (const CXXBaseSpecifier &Base : RD->bases()) {
03038     assert(!Base.getType()->isDependentType() &&
03039            "Cannot layout class with dependent bases.");
03040     if (!Base.isVirtual())
03041       Bases.push_back(Base.getType()->getAsCXXRecordDecl());
03042   }
03043 
03044   // Sort nvbases by offset.
03045   std::stable_sort(Bases.begin(), Bases.end(),
03046                    [&](const CXXRecordDecl *L, const CXXRecordDecl *R) {
03047     return Layout.getBaseClassOffset(L) < Layout.getBaseClassOffset(R);
03048   });
03049 
03050   // Dump (non-virtual) bases
03051   for (const CXXRecordDecl *Base : Bases) {
03052     CharUnits BaseOffset = Offset + Layout.getBaseClassOffset(Base);
03053     DumpCXXRecordLayout(OS, Base, C, BaseOffset, IndentLevel,
03054                         Base == PrimaryBase ? "(primary base)" : "(base)",
03055                         /*IncludeVirtualBases=*/false);
03056   }
03057 
03058   // vbptr (for Microsoft C++ ABI)
03059   if (HasOwnVBPtr) {
03060     PrintOffset(OS, Offset + Layout.getVBPtrOffset(), IndentLevel);
03061     OS << '(' << *RD << " vbtable pointer)\n";
03062   }
03063 
03064   // Dump fields.
03065   uint64_t FieldNo = 0;
03066   for (CXXRecordDecl::field_iterator I = RD->field_begin(),
03067          E = RD->field_end(); I != E; ++I, ++FieldNo) {
03068     const FieldDecl &Field = **I;
03069     CharUnits FieldOffset = Offset + 
03070       C.toCharUnitsFromBits(Layout.getFieldOffset(FieldNo));
03071 
03072     if (const CXXRecordDecl *D = Field.getType()->getAsCXXRecordDecl()) {
03073       DumpCXXRecordLayout(OS, D, C, FieldOffset, IndentLevel,
03074                           Field.getName().data(),
03075                           /*IncludeVirtualBases=*/true);
03076       continue;
03077     }
03078 
03079     PrintOffset(OS, FieldOffset, IndentLevel);
03080     OS << Field.getType().getAsString() << ' ' << Field << '\n';
03081   }
03082 
03083   if (!IncludeVirtualBases)
03084     return;
03085 
03086   // Dump virtual bases.
03087   const ASTRecordLayout::VBaseOffsetsMapTy &vtordisps = 
03088     Layout.getVBaseOffsetsMap();
03089   for (const CXXBaseSpecifier &Base : RD->vbases()) {
03090     assert(Base.isVirtual() && "Found non-virtual class!");
03091     const CXXRecordDecl *VBase = Base.getType()->getAsCXXRecordDecl();
03092 
03093     CharUnits VBaseOffset = Offset + Layout.getVBaseClassOffset(VBase);
03094 
03095     if (vtordisps.find(VBase)->second.hasVtorDisp()) {
03096       PrintOffset(OS, VBaseOffset - CharUnits::fromQuantity(4), IndentLevel);
03097       OS << "(vtordisp for vbase " << *VBase << ")\n";
03098     }
03099 
03100     DumpCXXRecordLayout(OS, VBase, C, VBaseOffset, IndentLevel,
03101                         VBase == PrimaryBase ?
03102                         "(primary virtual base)" : "(virtual base)",
03103                         /*IncludeVirtualBases=*/false);
03104   }
03105 
03106   PrintIndentNoOffset(OS, IndentLevel - 1);
03107   OS << "[sizeof=" << Layout.getSize().getQuantity();
03108   if (!isMsLayout(RD))
03109     OS << ", dsize=" << Layout.getDataSize().getQuantity();
03110   OS << ", align=" << Layout.getAlignment().getQuantity() << '\n';
03111 
03112   PrintIndentNoOffset(OS, IndentLevel - 1);
03113   OS << " nvsize=" << Layout.getNonVirtualSize().getQuantity();
03114   OS << ", nvalign=" << Layout.getNonVirtualAlignment().getQuantity() << "]\n";
03115 }
03116 
03117 void ASTContext::DumpRecordLayout(const RecordDecl *RD,
03118                                   raw_ostream &OS,
03119                                   bool Simple) const {
03120   const ASTRecordLayout &Info = getASTRecordLayout(RD);
03121 
03122   if (const CXXRecordDecl *CXXRD = dyn_cast<CXXRecordDecl>(RD))
03123     if (!Simple)
03124       return DumpCXXRecordLayout(OS, CXXRD, *this, CharUnits(), 0, nullptr,
03125                                  /*IncludeVirtualBases=*/true);
03126 
03127   OS << "Type: " << getTypeDeclType(RD).getAsString() << "\n";
03128   if (!Simple) {
03129     OS << "Record: ";
03130     RD->dump();
03131   }
03132   OS << "\nLayout: ";
03133   OS << "<ASTRecordLayout\n";
03134   OS << "  Size:" << toBits(Info.getSize()) << "\n";
03135   if (!isMsLayout(RD))
03136     OS << "  DataSize:" << toBits(Info.getDataSize()) << "\n";
03137   OS << "  Alignment:" << toBits(Info.getAlignment()) << "\n";
03138   OS << "  FieldOffsets: [";
03139   for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i) {
03140     if (i) OS << ", ";
03141     OS << Info.getFieldOffset(i);
03142   }
03143   OS << "]>\n";
03144 }
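// Note: these dump routines are reached when record-layout dumping is enabled
// (the -fdump-record-layouts cc1 option, typically passed as
// -Xclang -fdump-record-layouts).  The simple form above corresponds to
// -fdump-record-layouts-simple and prints only the <ASTRecordLayout ...>
// summary, while the C++ path prints the indented per-subobject view produced
// by DumpCXXRecordLayout.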