[llvm] a565509 - [ADT] Use Empty Base Optimization for Allocators

Nathan James via llvm-commits llvm-commits at lists.llvm.org
Tue Jul 12 15:57:14 PDT 2022


Author: Nathan James
Date: 2022-07-12T23:57:04+01:00
New Revision: a565509308f9372c4de1c4c32afde461a42e81c8

URL: https://github.com/llvm/llvm-project/commit/a565509308f9372c4de1c4c32afde461a42e81c8
DIFF: https://github.com/llvm/llvm-project/commit/a565509308f9372c4de1c4c32afde461a42e81c8.diff

LOG: [ADT] Use Empty Base Optimization for Allocators

In D94439, BumpPtrAllocator changed its implementation to use an empty base optimization for the underlying allocator.
This patch builds on that by extending the same technique to more classes, and by enabling the underlying allocator to be a reference type — something not previously possible, since you cannot derive from a reference.

The main place this sees use is in StringMaps which often use the default MallocAllocator, yet have to pay the size of a pointer for no reason.

Reviewed By: dblaikie

Differential Revision: https://reviews.llvm.org/D129206

Added: 
    

Modified: 
    llvm/include/llvm/ADT/ScopedHashTable.h
    llvm/include/llvm/ADT/StringMap.h
    llvm/include/llvm/Support/Allocator.h
    llvm/include/llvm/Support/AllocatorBase.h
    llvm/unittests/ADT/StringMapTest.cpp

Removed: 
    


################################################################################
diff  --git a/llvm/include/llvm/ADT/ScopedHashTable.h b/llvm/include/llvm/ADT/ScopedHashTable.h
index 48544961d095b..78d4df7d56845 100644
--- a/llvm/include/llvm/ADT/ScopedHashTable.h
+++ b/llvm/include/llvm/ADT/ScopedHashTable.h
@@ -147,7 +147,9 @@ class ScopedHashTableIterator {
 };
 
 template <typename K, typename V, typename KInfo, typename AllocatorTy>
-class ScopedHashTable {
+class ScopedHashTable : detail::AllocatorHolder<AllocatorTy> {
+  using AllocTy = detail::AllocatorHolder<AllocatorTy>;
+
 public:
   /// ScopeTy - This is a helpful typedef that allows clients to get easy access
   /// to the name of the scope for this hash table.
@@ -162,11 +164,9 @@ class ScopedHashTable {
   DenseMap<K, ValTy*, KInfo> TopLevelMap;
   ScopeTy *CurScope = nullptr;
 
-  AllocatorTy Allocator;
-
 public:
   ScopedHashTable() = default;
-  ScopedHashTable(AllocatorTy A) : Allocator(A) {}
+  ScopedHashTable(AllocatorTy A) : AllocTy(A) {}
   ScopedHashTable(const ScopedHashTable &) = delete;
   ScopedHashTable &operator=(const ScopedHashTable &) = delete;
 
@@ -175,8 +175,7 @@ class ScopedHashTable {
   }
 
   /// Access to the allocator.
-  AllocatorTy &getAllocator() { return Allocator; }
-  const AllocatorTy &getAllocator() const { return Allocator; }
+  using AllocTy::getAllocator;
 
   /// Return 1 if the specified key is in the table, 0 otherwise.
   size_type count(const K &Key) const {
@@ -217,7 +216,7 @@ class ScopedHashTable {
     assert(S && "No scope active!");
     ScopedHashTableVal<K, V> *&KeyEntry = TopLevelMap[Key];
     KeyEntry = ValTy::Create(S->getLastValInScope(), KeyEntry, Key, Val,
-                             Allocator);
+                             getAllocator());
     S->setLastValInScope(KeyEntry);
   }
 };

diff  --git a/llvm/include/llvm/ADT/StringMap.h b/llvm/include/llvm/ADT/StringMap.h
index 23248093c67e2..81f2626eea72c 100644
--- a/llvm/include/llvm/ADT/StringMap.h
+++ b/llvm/include/llvm/ADT/StringMap.h
@@ -107,8 +107,9 @@ class StringMapImpl {
 /// funky memory allocation and hashing things to make it extremely efficient,
 /// storing the string data *after* the value in the map.
 template <typename ValueTy, typename AllocatorTy = MallocAllocator>
-class StringMap : public StringMapImpl {
-  AllocatorTy Allocator;
+class StringMap : public StringMapImpl,
+                  private detail::AllocatorHolder<AllocatorTy> {
+  using AllocTy = detail::AllocatorHolder<AllocatorTy>;
 
 public:
   using MapEntryTy = StringMapEntry<ValueTy>;
@@ -119,12 +120,11 @@ class StringMap : public StringMapImpl {
       : StringMapImpl(InitialSize, static_cast<unsigned>(sizeof(MapEntryTy))) {}
 
   explicit StringMap(AllocatorTy A)
-      : StringMapImpl(static_cast<unsigned>(sizeof(MapEntryTy))), Allocator(A) {
-  }
+      : StringMapImpl(static_cast<unsigned>(sizeof(MapEntryTy))), AllocTy(A) {}
 
   StringMap(unsigned InitialSize, AllocatorTy A)
       : StringMapImpl(InitialSize, static_cast<unsigned>(sizeof(MapEntryTy))),
-        Allocator(A) {}
+        AllocTy(A) {}
 
   StringMap(std::initializer_list<std::pair<StringRef, ValueTy>> List)
       : StringMapImpl(List.size(), static_cast<unsigned>(sizeof(MapEntryTy))) {
@@ -132,11 +132,11 @@ class StringMap : public StringMapImpl {
   }
 
   StringMap(StringMap &&RHS)
-      : StringMapImpl(std::move(RHS)), Allocator(std::move(RHS.Allocator)) {}
+      : StringMapImpl(std::move(RHS)), AllocTy(std::move(RHS.getAllocator())) {}
 
   StringMap(const StringMap &RHS)
       : StringMapImpl(static_cast<unsigned>(sizeof(MapEntryTy))),
-        Allocator(RHS.Allocator) {
+        AllocTy(RHS.getAllocator()) {
     if (RHS.empty())
       return;
 
@@ -156,7 +156,7 @@ class StringMap : public StringMapImpl {
       }
 
       TheTable[I] = MapEntryTy::Create(
-          static_cast<MapEntryTy *>(Bucket)->getKey(), Allocator,
+          static_cast<MapEntryTy *>(Bucket)->getKey(), getAllocator(),
           static_cast<MapEntryTy *>(Bucket)->getValue());
       HashTable[I] = RHSHashTable[I];
     }
@@ -171,7 +171,7 @@ class StringMap : public StringMapImpl {
 
   StringMap &operator=(StringMap RHS) {
     StringMapImpl::swap(RHS);
-    std::swap(Allocator, RHS.Allocator);
+    std::swap(getAllocator(), RHS.getAllocator());
     return *this;
   }
 
@@ -183,15 +183,14 @@ class StringMap : public StringMapImpl {
       for (unsigned I = 0, E = NumBuckets; I != E; ++I) {
         StringMapEntryBase *Bucket = TheTable[I];
         if (Bucket && Bucket != getTombstoneVal()) {
-          static_cast<MapEntryTy *>(Bucket)->Destroy(Allocator);
+          static_cast<MapEntryTy *>(Bucket)->Destroy(getAllocator());
         }
       }
     }
     free(TheTable);
   }
 
-  AllocatorTy &getAllocator() { return Allocator; }
-  const AllocatorTy &getAllocator() const { return Allocator; }
+  using AllocTy::getAllocator;
 
   using key_type = const char *;
   using mapped_type = ValueTy;
@@ -336,7 +335,8 @@ class StringMap : public StringMapImpl {
 
     if (Bucket == getTombstoneVal())
       --NumTombstones;
-    Bucket = MapEntryTy::Create(Key, Allocator, std::forward<ArgsTy>(Args)...);
+    Bucket =
+        MapEntryTy::Create(Key, getAllocator(), std::forward<ArgsTy>(Args)...);
     ++NumItems;
     assert(NumItems + NumTombstones <= NumBuckets);
 
@@ -354,7 +354,7 @@ class StringMap : public StringMapImpl {
     for (unsigned I = 0, E = NumBuckets; I != E; ++I) {
       StringMapEntryBase *&Bucket = TheTable[I];
       if (Bucket && Bucket != getTombstoneVal()) {
-        static_cast<MapEntryTy *>(Bucket)->Destroy(Allocator);
+        static_cast<MapEntryTy *>(Bucket)->Destroy(getAllocator());
       }
       Bucket = nullptr;
     }
@@ -370,7 +370,7 @@ class StringMap : public StringMapImpl {
   void erase(iterator I) {
     MapEntryTy &V = *I;
     remove(&V);
-    V.Destroy(Allocator);
+    V.Destroy(getAllocator());
   }
 
   bool erase(StringRef Key) {

diff  --git a/llvm/include/llvm/Support/Allocator.h b/llvm/include/llvm/Support/Allocator.h
index 5ca0c9decac30..041729fa6594d 100644
--- a/llvm/include/llvm/Support/Allocator.h
+++ b/llvm/include/llvm/Support/Allocator.h
@@ -63,7 +63,9 @@ template <typename AllocatorT = MallocAllocator, size_t SlabSize = 4096,
 class BumpPtrAllocatorImpl
     : public AllocatorBase<BumpPtrAllocatorImpl<AllocatorT, SlabSize,
                                                 SizeThreshold, GrowthDelay>>,
-      private AllocatorT {
+      private detail::AllocatorHolder<AllocatorT> {
+  using AllocTy = detail::AllocatorHolder<AllocatorT>;
+
 public:
   static_assert(SizeThreshold <= SlabSize,
                 "The SizeThreshold must be at most the SlabSize to ensure "
@@ -77,12 +79,12 @@ class BumpPtrAllocatorImpl
 
   template <typename T>
   BumpPtrAllocatorImpl(T &&Allocator)
-      : AllocatorT(std::forward<T &&>(Allocator)) {}
+      : AllocTy(std::forward<T &&>(Allocator)) {}
 
   // Manually implement a move constructor as we must clear the old allocator's
   // slabs as a matter of correctness.
   BumpPtrAllocatorImpl(BumpPtrAllocatorImpl &&Old)
-      : AllocatorT(static_cast<AllocatorT &&>(Old)), CurPtr(Old.CurPtr),
+      : AllocTy(std::move(Old.getAllocator())), CurPtr(Old.CurPtr),
         End(Old.End), Slabs(std::move(Old.Slabs)),
         CustomSizedSlabs(std::move(Old.CustomSizedSlabs)),
         BytesAllocated(Old.BytesAllocated), RedZoneSize(Old.RedZoneSize) {
@@ -107,7 +109,7 @@ class BumpPtrAllocatorImpl
     RedZoneSize = RHS.RedZoneSize;
     Slabs = std::move(RHS.Slabs);
     CustomSizedSlabs = std::move(RHS.CustomSizedSlabs);
-    AllocatorT::operator=(static_cast<AllocatorT &&>(RHS));
+    AllocTy::operator=(std::move(RHS.getAllocator()));
 
     RHS.CurPtr = RHS.End = nullptr;
     RHS.BytesAllocated = 0;
@@ -175,7 +177,7 @@ class BumpPtrAllocatorImpl
     size_t PaddedSize = SizeToAllocate + Alignment.value() - 1;
     if (PaddedSize > SizeThreshold) {
       void *NewSlab =
-          AllocatorT::Allocate(PaddedSize, alignof(std::max_align_t));
+          this->getAllocator().Allocate(PaddedSize, alignof(std::max_align_t));
       // We own the new slab and don't want anyone reading anything other than
       // pieces returned from this method.  So poison the whole slab.
       __asan_poison_memory_region(NewSlab, PaddedSize);
@@ -334,8 +336,8 @@ class BumpPtrAllocatorImpl
   void StartNewSlab() {
     size_t AllocatedSlabSize = computeSlabSize(Slabs.size());
 
-    void *NewSlab =
-        AllocatorT::Allocate(AllocatedSlabSize, alignof(std::max_align_t));
+    void *NewSlab = this->getAllocator().Allocate(AllocatedSlabSize,
+                                                  alignof(std::max_align_t));
     // We own the new slab and don't want anyone reading anything other than
     // pieces returned from this method.  So poison the whole slab.
     __asan_poison_memory_region(NewSlab, AllocatedSlabSize);
@@ -351,7 +353,8 @@ class BumpPtrAllocatorImpl
     for (; I != E; ++I) {
       size_t AllocatedSlabSize =
           computeSlabSize(std::distance(Slabs.begin(), I));
-      AllocatorT::Deallocate(*I, AllocatedSlabSize, alignof(std::max_align_t));
+      this->getAllocator().Deallocate(*I, AllocatedSlabSize,
+                                      alignof(std::max_align_t));
     }
   }
 
@@ -360,7 +363,7 @@ class BumpPtrAllocatorImpl
     for (auto &PtrAndSize : CustomSizedSlabs) {
       void *Ptr = PtrAndSize.first;
       size_t Size = PtrAndSize.second;
-      AllocatorT::Deallocate(Ptr, Size, alignof(std::max_align_t));
+      this->getAllocator().Deallocate(Ptr, Size, alignof(std::max_align_t));
     }
   }
 

diff  --git a/llvm/include/llvm/Support/AllocatorBase.h b/llvm/include/llvm/Support/AllocatorBase.h
index eccced1d1ff47..5d05d3f8777b0 100644
--- a/llvm/include/llvm/Support/AllocatorBase.h
+++ b/llvm/include/llvm/Support/AllocatorBase.h
@@ -99,6 +99,28 @@ class MallocAllocator : public AllocatorBase<MallocAllocator> {
   void PrintStats() const {}
 };
 
+namespace detail {
+
+template <typename Alloc> class AllocatorHolder : Alloc {
+public:
+  AllocatorHolder() = default;
+  AllocatorHolder(const Alloc &A) : Alloc(A) {}
+  AllocatorHolder(Alloc &&A) : Alloc(static_cast<Alloc &&>(A)) {}
+  Alloc &getAllocator() { return *this; }
+  const Alloc &getAllocator() const { return *this; }
+};
+
+template <typename Alloc> class AllocatorHolder<Alloc &> {
+  Alloc &A;
+
+public:
+  AllocatorHolder(Alloc &A) : A(A) {}
+  Alloc &getAllocator() { return A; }
+  const Alloc &getAllocator() const { return A; }
+};
+
+} // namespace detail
+
 } // namespace llvm
 
 #endif // LLVM_SUPPORT_ALLOCATORBASE_H

diff  --git a/llvm/unittests/ADT/StringMapTest.cpp b/llvm/unittests/ADT/StringMapTest.cpp
index 817fec6c37a2a..1eac07bcfb832 100644
--- a/llvm/unittests/ADT/StringMapTest.cpp
+++ b/llvm/unittests/ADT/StringMapTest.cpp
@@ -17,6 +17,10 @@ using namespace llvm;
 
 namespace {
 
+static_assert(sizeof(StringMap<uint32_t>) <
+                  sizeof(StringMap<uint32_t, MallocAllocator &>),
+              "Ensure empty base optimization happens with default allocator");
+
 // Test fixture
 class StringMapTest : public testing::Test {
 protected:


        


More information about the llvm-commits mailing list