[clang] Fix: Handle structs exceeding 1EB size limit (PR #146032)
via cfe-commits
cfe-commits at lists.llvm.org
Fri Jun 27 00:33:37 PDT 2025
https://github.com/Karthikdhondi updated https://github.com/llvm/llvm-project/pull/146032
>From c6bf84fd57c550b1c160c4497b0de2f2db87c31f Mon Sep 17 00:00:00 2001
From: Karthikdhondi <karthik.dhondi at gmail.com>
Date: Fri, 27 Jun 2025 11:43:31 +0530
Subject: [PATCH 1/2] Fix: Handle structs exceeding 1EB size limit
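
The new check in getASTRecordLayout() compares the computed record size, in
bytes, against a 2^60-byte (1 EiB) ceiling and reports the new
err_struct_too_large diagnostic when that ceiling is reached. Below is a
minimal standalone sketch of the same guard arithmetic, with the Clang types
(CharUnits, DiagnosticsEngine) replaced by plain integers and printf purely
for illustration; it is not the code added by this patch:

#include <cstdint>
#include <cstdio>

int main() {
  // Ceiling used by the patch: 2^60 bytes (1152921504606846976).
  constexpr uint64_t MaxStructSizeInBytes = 1ULL << 60;
  // Hypothetical record size mirroring the new test: two char arrays of
  // 2^60 elements each, i.e. 2^61 bytes in total.
  const uint64_t RecordSizeInBytes = (1ULL << 60) + (1ULL << 60);

  if (RecordSizeInBytes >= MaxStructSizeInBytes)
    std::printf("error: structure is too large (%llu >= %llu bytes)\n",
                (unsigned long long)RecordSizeInBytes,
                (unsigned long long)MaxStructSizeInBytes);
  return 0;
}

In the patch itself the size comes from NewEntry->getSize() (a CharUnits
value) and the report is issued through getDiagnostics().Report() with the
new err_struct_too_large diagnostic; see the RecordLayoutBuilder.cpp hunk
below.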
---
clang/include/clang/Basic/DiagnosticASTKinds.td | 2 ++
clang/lib/AST/RecordLayoutBuilder.cpp | 7 +++++++
clang/test/AST/absurdly_big_struct.cpp | 12 ++++++++++++
clang/test/Sema/offsetof-64.c | 4 ++--
4 files changed, 23 insertions(+), 2 deletions(-)
create mode 100644 clang/test/AST/absurdly_big_struct.cpp
diff --git a/clang/include/clang/Basic/DiagnosticASTKinds.td b/clang/include/clang/Basic/DiagnosticASTKinds.td
index d2cd86d05d55a..e3be4ab47633d 100644
--- a/clang/include/clang/Basic/DiagnosticASTKinds.td
+++ b/clang/include/clang/Basic/DiagnosticASTKinds.td
@@ -999,6 +999,8 @@ def note_module_odr_violation_mismatch_decl_unknown : Note<
"different friend declaration|different function template|different method|"
"different instance variable|different property|another unexpected decl}2">;
+def err_struct_too_large : Error<
+ "structure '%0' is too large, which exceeds maximum allowed size of %1 bytes">;
def remark_sanitize_address_insert_extra_padding_accepted : Remark<
"-fsanitize-address-field-padding applied to %0">, ShowInSystemHeader,
diff --git a/clang/lib/AST/RecordLayoutBuilder.cpp b/clang/lib/AST/RecordLayoutBuilder.cpp
index aacc079f2521d..f6a4f76bcc7f6 100644
--- a/clang/lib/AST/RecordLayoutBuilder.cpp
+++ b/clang/lib/AST/RecordLayoutBuilder.cpp
@@ -3463,6 +3463,13 @@ ASTContext::getASTRecordLayout(const RecordDecl *D) const {
ASTRecordLayouts[D] = NewEntry;
+ constexpr uint64_t MaxStructSizeInBytes = 1ULL << 60;
+ CharUnits StructSize = NewEntry->getSize();
+ if (static_cast<uint64_t>(StructSize.getQuantity()) >= MaxStructSizeInBytes) {
+ getDiagnostics().Report(D->getLocation(), diag::err_struct_too_large)
+ << D->getName() << MaxStructSizeInBytes;
+ }
+
if (getLangOpts().DumpRecordLayouts) {
llvm::outs() << "\n*** Dumping AST Record Layout\n";
DumpRecordLayout(D, llvm::outs(), getLangOpts().DumpRecordLayoutsSimple);
diff --git a/clang/test/AST/absurdly_big_struct.cpp b/clang/test/AST/absurdly_big_struct.cpp
new file mode 100644
index 0000000000000..04ac4d7ef6b74
--- /dev/null
+++ b/clang/test/AST/absurdly_big_struct.cpp
@@ -0,0 +1,12 @@
+// RUN: %clang_cc1 -fsyntax-only -verify %s -triple x86_64-linux-gnu
+
+struct a { // expected-error {{structure 'a' is too large, which exceeds maximum allowed size of 1152921504606846976 bytes}}
+ char x[1ull<<60];
+ char x2[1ull<<60];
+};
+
+a z[1];
+long long x() { return sizeof(a); }
+long long x2() { return sizeof(a::x); }
+long long x3() { return sizeof(a::x2); }
+long long x4() { return sizeof(z); }
diff --git a/clang/test/Sema/offsetof-64.c b/clang/test/Sema/offsetof-64.c
index 8ffc3af985880..692698fe39e00 100644
--- a/clang/test/Sema/offsetof-64.c
+++ b/clang/test/Sema/offsetof-64.c
@@ -2,7 +2,7 @@
// PR15216
// Don't crash when taking computing the offset of structs with large arrays.
-const unsigned long Size = (1l << 60);
+const unsigned long Size = (1l << 58);
struct Chunk1 {
char padding[Size]; // expected-warning {{folded to constant}}
@@ -10,7 +10,7 @@ struct Chunk1 {
char data;
};
-int test1 = __builtin_offsetof(struct Chunk1, data);
+unsigned long test1 = __builtin_offsetof(struct Chunk1, data);
struct Chunk2 {
char padding[Size][Size][Size]; // expected-error {{array is too large}}
>From 49fcccdba686c7cf3f4bbbd10696c66c3527d942 Mon Sep 17 00:00:00 2001
From: Karthikdhondi <karthik.dhondi at gmail.com>
Date: Fri, 27 Jun 2025 13:02:58 +0530
Subject: [PATCH 2/2] Fix: Handle structs exceeding 1EB size limit
---
clang/lib/AST/RecordLayoutBuilder.cpp | 197 ++++++++++++--------------
1 file changed, 93 insertions(+), 104 deletions(-)
diff --git a/clang/lib/AST/RecordLayoutBuilder.cpp b/clang/lib/AST/RecordLayoutBuilder.cpp
index f6a4f76bcc7f6..08bbc7b81dad9 100644
--- a/clang/lib/AST/RecordLayoutBuilder.cpp
+++ b/clang/lib/AST/RecordLayoutBuilder.cpp
@@ -43,7 +43,7 @@ struct BaseSubobjectInfo {
bool IsVirtual;
/// Bases - Information about the base subobjects.
- SmallVector<BaseSubobjectInfo*, 4> Bases;
+ SmallVector<BaseSubobjectInfo *, 4> Bases;
/// PrimaryVirtualBaseInfo - Holds the base info for the primary virtual base
/// of this base info (if one exists).
@@ -77,8 +77,7 @@ struct ExternalLayout {
/// Get the offset of the given field. The external source must provide
/// entries for all fields in the record.
uint64_t getExternalFieldOffset(const FieldDecl *FD) {
- assert(FieldOffsets.count(FD) &&
- "Field does not have an external offset");
+ assert(FieldOffsets.count(FD) && "Field does not have an external offset");
return FieldOffsets[FD];
}
@@ -167,16 +166,15 @@ class EmptySubobjectMap {
CharUnits SizeOfLargestEmptySubobject;
EmptySubobjectMap(const ASTContext &Context, const CXXRecordDecl *Class)
- : Context(Context), CharWidth(Context.getCharWidth()), Class(Class) {
- ComputeEmptySubobjectSizes();
+ : Context(Context), CharWidth(Context.getCharWidth()), Class(Class) {
+ ComputeEmptySubobjectSizes();
}
/// CanPlaceBaseAtOffset - Return whether the given base class can be placed
/// at the given offset.
/// Returns false if placing the record will result in two components
/// (direct or indirect) of the same type having the same offset.
- bool CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
- CharUnits Offset);
+ bool CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info, CharUnits Offset);
/// CanPlaceFieldAtOffset - Return whether a field can be placed at the given
/// offset.
@@ -227,9 +225,8 @@ void EmptySubobjectMap::ComputeEmptySubobjectSizes() {
}
}
-bool
-EmptySubobjectMap::CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
- CharUnits Offset) const {
+bool EmptySubobjectMap::CanPlaceSubobjectAtOffset(const CXXRecordDecl *RD,
+ CharUnits Offset) const {
// We only need to check empty bases.
if (!RD->isEmpty())
return true;
@@ -265,9 +262,8 @@ void EmptySubobjectMap::AddSubobjectAtOffset(const CXXRecordDecl *RD,
MaxEmptyClassOffset = Offset;
}
-bool
-EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset(const BaseSubobjectInfo *Info,
- CharUnits Offset) {
+bool EmptySubobjectMap::CanPlaceBaseSubobjectAtOffset(
+ const BaseSubobjectInfo *Info, CharUnits Offset) {
// We don't have to keep looking past the maximum offset that's known to
// contain an empty class.
if (!AnyEmptySubobjectsBeyondOffset(Offset))
@@ -368,10 +364,9 @@ bool EmptySubobjectMap::CanPlaceBaseAtOffset(const BaseSubobjectInfo *Info,
return true;
}
-bool
-EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
- const CXXRecordDecl *Class,
- CharUnits Offset) const {
+bool EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(
+ const CXXRecordDecl *RD, const CXXRecordDecl *Class,
+ CharUnits Offset) const {
// We don't have to keep looking past the maximum offset that's known to
// contain an empty class.
if (!AnyEmptySubobjectsBeyondOffset(Offset))
@@ -418,9 +413,8 @@ EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const CXXRecordDecl *RD,
return true;
}
-bool
-EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
- CharUnits Offset) const {
+bool EmptySubobjectMap::CanPlaceFieldSubobjectAtOffset(const FieldDecl *FD,
+ CharUnits Offset) const {
// We don't have to keep looking past the maximum offset that's known to
// contain an empty class.
if (!AnyEmptySubobjectsBeyondOffset(Offset))
@@ -560,7 +554,7 @@ void EmptySubobjectMap::UpdateEmptyFieldSubobjects(
}
}
-typedef llvm::SmallPtrSet<const CXXRecordDecl*, 4> ClassSetTy;
+typedef llvm::SmallPtrSet<const CXXRecordDecl *, 4> ClassSetTy;
class ItaniumRecordLayoutBuilder {
protected:
@@ -712,15 +706,13 @@ class ItaniumRecordLayoutBuilder {
bool FieldPacked, const FieldDecl *D);
void LayoutBitField(const FieldDecl *D);
- TargetCXXABI getCXXABI() const {
- return Context.getTargetInfo().getCXXABI();
- }
+ TargetCXXABI getCXXABI() const { return Context.getTargetInfo().getCXXABI(); }
/// BaseSubobjectInfoAllocator - Allocator for BaseSubobjectInfo objects.
llvm::SpecificBumpPtrAllocator<BaseSubobjectInfo> BaseSubobjectInfoAllocator;
typedef llvm::DenseMap<const CXXRecordDecl *, BaseSubobjectInfo *>
- BaseSubobjectInfoMapTy;
+ BaseSubobjectInfoMapTy;
/// VirtualBaseInfo - Map from all the (direct or indirect) virtual bases
/// of the class we're laying out to their base subobject info.
@@ -793,8 +785,8 @@ class ItaniumRecordLayoutBuilder {
uint64_t ComputedOffset);
void CheckFieldPadding(uint64_t Offset, uint64_t UnpaddedOffset,
- uint64_t UnpackedOffset, unsigned UnpackedAlign,
- bool isPacked, const FieldDecl *D);
+ uint64_t UnpackedOffset, unsigned UnpackedAlign,
+ bool isPacked, const FieldDecl *D);
DiagnosticBuilder Diag(SourceLocation Loc, unsigned DiagID);
@@ -965,8 +957,7 @@ BaseSubobjectInfo *ItaniumRecordLayoutBuilder::ComputeBaseSubobjectInfo(
// Traversing the bases must have created the base info for our primary
// virtual base.
PrimaryVirtualBaseInfo = VirtualBaseInfo.lookup(PrimaryVirtualBase);
- assert(PrimaryVirtualBaseInfo &&
- "Did not create a primary virtual base!");
+ assert(PrimaryVirtualBaseInfo && "Did not create a primary virtual base!");
// Claim the primary virtual base as our primary virtual base.
Info->PrimaryVirtualBaseInfo = PrimaryVirtualBaseInfo;
@@ -984,13 +975,12 @@ void ItaniumRecordLayoutBuilder::ComputeBaseSubobjectInfo(
const CXXRecordDecl *BaseDecl = I.getType()->getAsCXXRecordDecl();
// Compute the base subobject info for this base.
- BaseSubobjectInfo *Info = ComputeBaseSubobjectInfo(BaseDecl, IsVirtual,
- nullptr);
+ BaseSubobjectInfo *Info =
+ ComputeBaseSubobjectInfo(BaseDecl, IsVirtual, nullptr);
if (IsVirtual) {
// ComputeBaseInfo has already added this base for us.
- assert(VirtualBaseInfo.count(BaseDecl) &&
- "Did not add virtual base!");
+ assert(VirtualBaseInfo.count(BaseDecl) && "Did not add virtual base!");
} else {
// Add the base info to the map of non-virtual bases.
assert(!NonVirtualBaseInfo.count(BaseDecl) &&
@@ -1043,15 +1033,15 @@ void ItaniumRecordLayoutBuilder::LayoutNonVirtualBases(
LayoutVirtualBase(PrimaryBaseInfo);
} else {
BaseSubobjectInfo *PrimaryBaseInfo =
- NonVirtualBaseInfo.lookup(PrimaryBase);
+ NonVirtualBaseInfo.lookup(PrimaryBase);
assert(PrimaryBaseInfo &&
"Did not find base info for non-virtual primary base!");
LayoutNonVirtualBase(PrimaryBaseInfo);
}
- // If this class needs a vtable/vf-table and didn't get one from a
- // primary base, add it in now.
+ // If this class needs a vtable/vf-table and didn't get one from a
+ // primary base, add it in now.
} else if (RD->isDynamicClass()) {
assert(DataSize == 0 && "Vtable pointer must be at offset zero!");
CharUnits PtrWidth = Context.toCharUnitsFromBits(
@@ -1191,8 +1181,8 @@ void ItaniumRecordLayoutBuilder::LayoutVirtualBase(
// Add its base class offset.
assert(!VBases.count(Base->Class) && "vbase offset already exists!");
- VBases.insert(std::make_pair(Base->Class,
- ASTRecordLayout::VBaseInfo(Offset, false)));
+ VBases.insert(
+ std::make_pair(Base->Class, ASTRecordLayout::VBaseInfo(Offset, false)));
AddPrimaryVirtualBaseOffsets(Base, Offset);
}
@@ -1451,9 +1441,8 @@ void ItaniumRecordLayoutBuilder::LayoutFields(const RecordDecl *D) {
}
// Rounds the specified size to have it a multiple of the char size.
-static uint64_t
-roundUpSizeToCharAlignment(uint64_t Size,
- const ASTContext &Context) {
+static uint64_t roundUpSizeToCharAlignment(uint64_t Size,
+ const ASTContext &Context) {
uint64_t CharAlignment = Context.getTargetInfo().getCharAlign();
return llvm::alignTo(Size, CharAlignment);
}
@@ -1498,8 +1487,7 @@ void ItaniumRecordLayoutBuilder::LayoutWideBitField(uint64_t FieldSize,
uint64_t UnpaddedFieldOffset = getDataSizeInBits() - UnfilledBitsInLastUnit;
if (IsUnion) {
- uint64_t RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize,
- Context);
+ uint64_t RoundedFieldSize = roundUpSizeToCharAlignment(FieldSize, Context);
setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize));
FieldOffset = 0;
} else {
@@ -1651,7 +1639,7 @@ void ItaniumRecordLayoutBuilder::LayoutBitField(const FieldDecl *D) {
// Compute the next available bit offset.
uint64_t FieldOffset =
- IsUnion ? 0 : (getDataSizeInBits() - UnfilledBitsInLastUnit);
+ IsUnion ? 0 : (getDataSizeInBits() - UnfilledBitsInLastUnit);
// Handle targets that don't honor bitfield type alignment.
if (!IsMsStruct && !Context.getTargetInfo().useBitFieldTypeAlignment()) {
@@ -1669,7 +1657,7 @@ void ItaniumRecordLayoutBuilder::LayoutBitField(const FieldDecl *D) {
Context.getTargetInfo().getZeroLengthBitfieldBoundary();
FieldAlign = std::max(FieldAlign, ZeroLengthBitfieldBoundary);
}
- // If that doesn't apply, just ignore the field alignment.
+ // If that doesn't apply, just ignore the field alignment.
} else {
FieldAlign = 1;
}
@@ -1813,8 +1801,8 @@ void ItaniumRecordLayoutBuilder::LayoutBitField(const FieldDecl *D) {
}
setDataSize(std::max(getDataSizeInBits(), RoundedFieldSize));
- // For non-zero-width bitfields in ms_struct structs, allocate a new
- // storage unit if necessary.
+ // For non-zero-width bitfields in ms_struct structs, allocate a new
+ // storage unit if necessary.
} else if (IsMsStruct && FieldSize) {
// We should have cleared UnfilledBitsInLastUnit in every case
// where we changed storage units.
@@ -1963,13 +1951,13 @@ void ItaniumRecordLayoutBuilder::LayoutField(const FieldDecl *D,
}
}
- bool FieldPacked = (Packed && (!FieldClass || FieldClass->isPOD() ||
- FieldClass->hasAttr<PackedAttr>() ||
- Context.getLangOpts().getClangABICompat() <=
- LangOptions::ClangABI::Ver15 ||
- Target.isPS() || Target.isOSDarwin() ||
- Target.isOSAIX())) ||
- D->hasAttr<PackedAttr>();
+ bool FieldPacked =
+ (Packed && (!FieldClass || FieldClass->isPOD() ||
+ FieldClass->hasAttr<PackedAttr>() ||
+ Context.getLangOpts().getClangABICompat() <=
+ LangOptions::ClangABI::Ver15 ||
+ Target.isPS() || Target.isOSDarwin() || Target.isOSAIX())) ||
+ D->hasAttr<PackedAttr>();
// When used as part of a typedef, or together with a 'packed' attribute, the
// 'aligned' attribute can be used to decrease alignment. In that case, it
@@ -2039,7 +2027,6 @@ void ItaniumRecordLayoutBuilder::LayoutField(const FieldDecl *D,
UnpackedFieldAlign = std::min(UnpackedFieldAlign, MaxFieldAlignment);
}
-
if (!FieldPacked)
FieldAlign = UnpackedFieldAlign;
if (DefaultsToAIXPowerAlignment)
@@ -2145,8 +2132,7 @@ void ItaniumRecordLayoutBuilder::FinishLayout(const NamedDecl *D) {
// array of zero-length, remains of Size 0
if (RD->isEmpty())
setSize(CharUnits::One());
- }
- else
+ } else
setSize(CharUnits::One());
}
@@ -2193,8 +2179,7 @@ void ItaniumRecordLayoutBuilder::FinishLayout(const NamedDecl *D) {
InBits = false;
}
Diag(RD->getLocation(), diag::warn_padded_struct_size)
- << Context.getTypeDeclType(RD)
- << PadSize
+ << Context.getTypeDeclType(RD) << PadSize
<< (InBits ? 1 : 0); // (byte|bit)
}
@@ -2273,7 +2258,8 @@ static unsigned getPaddingDiagFromTagKind(TagTypeKind Tag) {
return 1;
case TagTypeKind::Class:
return 2;
- default: llvm_unreachable("Invalid tag kind for field padding diagnostic!");
+ default:
+ llvm_unreachable("Invalid tag kind for field padding diagnostic!");
}
}
@@ -2351,7 +2337,7 @@ static const CXXMethodDecl *computeKeyFunction(ASTContext &Context,
return nullptr;
bool allowInlineFunctions =
- Context.getTargetInfo().getCXXABI().canKeyFunctionBeInline();
+ Context.getTargetInfo().getCXXABI().canKeyFunctionBeInline();
for (const CXXMethodDecl *MD : RD->methods()) {
if (!MD->isVirtual())
@@ -2572,6 +2558,7 @@ struct MicrosoftRecordLayoutBuilder {
private:
MicrosoftRecordLayoutBuilder(const MicrosoftRecordLayoutBuilder &) = delete;
void operator=(const MicrosoftRecordLayoutBuilder &) = delete;
+
public:
void layout(const RecordDecl *RD);
void cxxLayout(const CXXRecordDecl *RD);
@@ -2694,15 +2681,15 @@ MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(
// the alignment in the case of pragma pack. Note that the required alignment
// doesn't actually apply to the struct alignment at this point.
Alignment = std::max(Alignment, Info.Alignment);
- RequiredAlignment = std::max(RequiredAlignment, Layout.getRequiredAlignment());
+ RequiredAlignment =
+ std::max(RequiredAlignment, Layout.getRequiredAlignment());
Info.Alignment = std::max(Info.Alignment, Layout.getRequiredAlignment());
Info.Size = Layout.getNonVirtualSize();
return Info;
}
MicrosoftRecordLayoutBuilder::ElementInfo
-MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(
- const FieldDecl *FD) {
+MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(const FieldDecl *FD) {
// Get the alignment of the field type's natural alignment, ignore any
// alignment attributes.
auto TInfo =
@@ -2725,8 +2712,8 @@ MicrosoftRecordLayoutBuilder::getAdjustedElementInfo(
FD->getType()->getBaseElementTypeUnsafe()->getAs<RecordType>()) {
auto const &Layout = Context.getASTRecordLayout(RT->getDecl());
EndsWithZeroSizedObject = Layout.endsWithZeroSizedObject();
- FieldRequiredAlignment = std::max(FieldRequiredAlignment,
- Layout.getRequiredAlignment());
+ FieldRequiredAlignment =
+ std::max(FieldRequiredAlignment, Layout.getRequiredAlignment());
}
// Capture required alignment as a side-effect.
RequiredAlignment = std::max(RequiredAlignment, FieldRequiredAlignment);
@@ -2788,10 +2775,11 @@ void MicrosoftRecordLayoutBuilder::initializeLayout(const RecordDecl *RD) {
MaxFieldAlignment = CharUnits::Zero();
// Honor the default struct packing maximum alignment flag.
if (unsigned DefaultMaxFieldAlignment = Context.getLangOpts().PackStruct)
- MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
+ MaxFieldAlignment = CharUnits::fromQuantity(DefaultMaxFieldAlignment);
// Honor the packing attribute. The MS-ABI ignores pragma pack if its larger
// than the pointer size.
- if (const MaxFieldAlignmentAttr *MFAA = RD->getAttr<MaxFieldAlignmentAttr>()){
+ if (const MaxFieldAlignmentAttr *MFAA =
+ RD->getAttr<MaxFieldAlignmentAttr>()) {
unsigned PackedAlignment = MFAA->getAlignment();
if (PackedAlignment <=
Context.getTargetInfo().getPointerWidth(LangAS::Default))
@@ -2809,8 +2797,8 @@ void MicrosoftRecordLayoutBuilder::initializeLayout(const RecordDecl *RD) {
External.BaseOffsets, External.VirtualBaseOffsets);
}
-void
-MicrosoftRecordLayoutBuilder::initializeCXXLayout(const CXXRecordDecl *RD) {
+void MicrosoftRecordLayoutBuilder::initializeCXXLayout(
+ const CXXRecordDecl *RD) {
EndsWithZeroSizedObject = false;
LeadsWithZeroSizedBase = false;
HasOwnVFPtr = false;
@@ -2828,8 +2816,8 @@ MicrosoftRecordLayoutBuilder::initializeCXXLayout(const CXXRecordDecl *RD) {
PointerInfo.Alignment = std::min(PointerInfo.Alignment, MaxFieldAlignment);
}
-void
-MicrosoftRecordLayoutBuilder::layoutNonVirtualBases(const CXXRecordDecl *RD) {
+void MicrosoftRecordLayoutBuilder::layoutNonVirtualBases(
+ const CXXRecordDecl *RD) {
// The MS-ABI lays out all bases that contain leading vfptrs before it lays
// out any bases that do not contain vfptrs. We implement this as two passes
// over the bases. This approach guarantees that the primary base is laid out
@@ -3080,8 +3068,8 @@ void MicrosoftRecordLayoutBuilder::layoutBitField(const FieldDecl *FD) {
DataSize = Size;
}
-void
-MicrosoftRecordLayoutBuilder::layoutZeroWidthBitField(const FieldDecl *FD) {
+void MicrosoftRecordLayoutBuilder::layoutZeroWidthBitField(
+ const FieldDecl *FD) {
// Zero-width bitfields are ignored unless they follow a non-zero-width
// bitfield.
if (!LastFieldIsNonZeroWidthBitfield) {
@@ -3223,8 +3211,8 @@ void MicrosoftRecordLayoutBuilder::layoutVirtualBases(const CXXRecordDecl *RD) {
assert(BaseOffset >= Size && "base offset already allocated");
- VBases.insert(std::make_pair(BaseDecl,
- ASTRecordLayout::VBaseInfo(BaseOffset, HasVtordisp)));
+ VBases.insert(std::make_pair(
+ BaseDecl, ASTRecordLayout::VBaseInfo(BaseOffset, HasVtordisp)));
Size = BaseOffset + BaseLayout.getNonVirtualSize();
PreviousBaseLayout = &BaseLayout;
}
@@ -3289,10 +3277,9 @@ void MicrosoftRecordLayoutBuilder::finalizeLayout(const RecordDecl *RD) {
// Recursively walks the non-virtual bases of a class and determines if any of
// them are in the bases with overridden methods set.
-static bool
-RequiresVtordisp(const llvm::SmallPtrSetImpl<const CXXRecordDecl *> &
- BasesWithOverriddenMethods,
- const CXXRecordDecl *RD) {
+static bool RequiresVtordisp(const llvm::SmallPtrSetImpl<const CXXRecordDecl *>
+ &BasesWithOverriddenMethods,
+ const CXXRecordDecl *RD) {
if (BasesWithOverriddenMethods.count(RD))
return true;
// If any of a virtual bases non-virtual bases (recursively) requires a
@@ -3382,7 +3369,7 @@ ASTContext::getASTRecordLayout(const RecordDecl *D) const {
// until we *finish* parsing the definition.
if (D->hasExternalLexicalStorage() && !D->getDefinition())
- getExternalSource()->CompleteType(const_cast<RecordDecl*>(D));
+ getExternalSource()->CompleteType(const_cast<RecordDecl *>(D));
// Complete the redecl chain (if necessary).
(void)D->getMostRecentDecl();
@@ -3395,7 +3382,8 @@ ASTContext::getASTRecordLayout(const RecordDecl *D) const {
// Note that we can't save a reference to the entry because this function
// is recursive.
const ASTRecordLayout *Entry = ASTRecordLayouts[D];
- if (Entry) return *Entry;
+ if (Entry)
+ return *Entry;
const ASTRecordLayout *NewEntry = nullptr;
@@ -3463,11 +3451,11 @@ ASTContext::getASTRecordLayout(const RecordDecl *D) const {
ASTRecordLayouts[D] = NewEntry;
- constexpr uint64_t MaxStructSizeInBytes = 1ULL << 60;
- CharUnits StructSize = NewEntry->getSize();
+ constexpr uint64_t MaxStructSizeInBytes = 1ULL << 60;
+ CharUnits StructSize = NewEntry->getSize();
if (static_cast<uint64_t>(StructSize.getQuantity()) >= MaxStructSizeInBytes) {
getDiagnostics().Report(D->getLocation(), diag::err_struct_too_large)
- << D->getName() << MaxStructSizeInBytes;
+ << D->getName() << MaxStructSizeInBytes;
}
if (getLangOpts().DumpRecordLayouts) {
@@ -3478,7 +3466,8 @@ ASTContext::getASTRecordLayout(const RecordDecl *D) const {
return *NewEntry;
}
-const CXXMethodDecl *ASTContext::getCurrentKeyFunction(const CXXRecordDecl *RD) {
+const CXXMethodDecl *
+ASTContext::getCurrentKeyFunction(const CXXRecordDecl *RD) {
if (!getTargetInfo().getCXXABI().hasKeyFunctions())
return nullptr;
@@ -3496,7 +3485,7 @@ const CXXMethodDecl *ASTContext::getCurrentKeyFunction(const CXXRecordDecl *RD)
// Store it back if it changed.
if (Entry.isOffset() || Entry.isValid() != bool(Result))
- KeyFunctions[RD] = const_cast<Decl*>(Result);
+ KeyFunctions[RD] = const_cast<Decl *>(Result);
return cast_or_null<CXXMethodDecl>(Result);
}
@@ -3512,7 +3501,8 @@ void ASTContext::setNonKeyFunction(const CXXMethodDecl *Method) {
auto I = Map.find(Method->getParent());
// If it's not cached, there's nothing to do.
- if (I == Map.end()) return;
+ if (I == Map.end())
+ return;
// If it is cached, check whether it's the target method, and if so,
// remove it from the cache. Note, the call to 'get' might invalidate
@@ -3557,8 +3547,8 @@ uint64_t ASTContext::lookupFieldBitOffset(const ObjCInterfaceDecl *OID,
// directly.
unsigned Index = 0;
- for (const ObjCIvarDecl *IVD = Container->all_declared_ivar_begin();
- IVD; IVD = IVD->getNextIvar()) {
+ for (const ObjCIvarDecl *IVD = Container->all_declared_ivar_begin(); IVD;
+ IVD = IVD->getNextIvar()) {
if (Ivar == IVD)
break;
++Index;
@@ -3577,7 +3567,7 @@ const ASTRecordLayout &
ASTContext::getObjCLayout(const ObjCInterfaceDecl *D) const {
// Retrieve the definition
if (D->hasExternalLexicalStorage() && !D->getDefinition())
- getExternalSource()->CompleteType(const_cast<ObjCInterfaceDecl*>(D));
+ getExternalSource()->CompleteType(const_cast<ObjCInterfaceDecl *>(D));
D = D->getDefinition();
assert(D && !D->isInvalidDecl() && D->isThisDeclarationADefinition() &&
"Invalid interface decl!");
@@ -3600,8 +3590,8 @@ ASTContext::getObjCLayout(const ObjCInterfaceDecl *D) const {
return *NewEntry;
}
-static void PrintOffset(raw_ostream &OS,
- CharUnits Offset, unsigned IndentLevel) {
+static void PrintOffset(raw_ostream &OS, CharUnits Offset,
+ unsigned IndentLevel) {
OS << llvm::format("%10" PRId64 " | ", (int64_t)Offset.getQuantity());
OS.indent(IndentLevel * 2);
}
@@ -3630,12 +3620,9 @@ static void PrintIndentNoOffset(raw_ostream &OS, unsigned IndentLevel) {
}
static void DumpRecordLayout(raw_ostream &OS, const RecordDecl *RD,
- const ASTContext &C,
- CharUnits Offset,
- unsigned IndentLevel,
- const char* Description,
- bool PrintSizeInfo,
- bool IncludeVirtualBases) {
+ const ASTContext &C, CharUnits Offset,
+ unsigned IndentLevel, const char *Description,
+ bool PrintSizeInfo, bool IncludeVirtualBases) {
const ASTRecordLayout &Layout = C.getASTRecordLayout(RD);
auto CXXRD = dyn_cast<CXXRecordDecl>(RD);
@@ -3701,7 +3688,7 @@ static void DumpRecordLayout(raw_ostream &OS, const RecordDecl *RD,
uint64_t LocalFieldOffsetInBits =
Layout.getFieldOffset(Field->getFieldIndex());
CharUnits FieldOffset =
- Offset + C.toCharUnitsFromBits(LocalFieldOffsetInBits);
+ Offset + C.toCharUnitsFromBits(LocalFieldOffsetInBits);
// Recursively dump fields of record type.
if (auto RT = Field->getType()->getAs<RecordType>()) {
@@ -3729,7 +3716,7 @@ static void DumpRecordLayout(raw_ostream &OS, const RecordDecl *RD,
// Dump virtual bases.
if (CXXRD && IncludeVirtualBases) {
const ASTRecordLayout::VBaseOffsetsMapTy &VtorDisps =
- Layout.getVBaseOffsetsMap();
+ Layout.getVBaseOffsetsMap();
for (const CXXBaseSpecifier &Base : CXXRD->vbases()) {
assert(Base.isVirtual() && "Found non-virtual class!");
@@ -3743,14 +3730,16 @@ static void DumpRecordLayout(raw_ostream &OS, const RecordDecl *RD,
}
DumpRecordLayout(OS, VBase, C, VBaseOffset, IndentLevel,
- VBase == Layout.getPrimaryBase() ?
- "(primary virtual base)" : "(virtual base)",
+ VBase == Layout.getPrimaryBase()
+ ? "(primary virtual base)"
+ : "(virtual base)",
/*PrintSizeInfo=*/false,
/*IncludeVirtualBases=*/false);
}
}
- if (!PrintSizeInfo) return;
+ if (!PrintSizeInfo)
+ return;
PrintIndentNoOffset(OS, IndentLevel - 1);
OS << "[sizeof=" << Layout.getSize().getQuantity();