[compiler-rt] [scudo] Add EnableMultiRegions mode (PR #98076)

Christopher Ferris via llvm-commits llvm-commits at lists.llvm.org
Fri Sep 6 16:33:44 PDT 2024


================
@@ -555,18 +715,208 @@ template <typename Config> class SizeClassAllocator64 {
     bool isPopulatingFreeList GUARDED_BY(FLLock) = false;
   };
   struct RegionInfo : UnpaddedRegionInfo {
+    // This is only used when `Config::getEnableMultiRegions` is enabled and is
+    // guarded by the mutex in `RegionInfoManager`.
+    RegionInfo *Next = nullptr;
     char Padding[SCUDO_CACHE_LINE_SIZE -
-                 (sizeof(UnpaddedRegionInfo) % SCUDO_CACHE_LINE_SIZE)] = {};
+                 ((sizeof(UnpaddedRegionInfo) + sizeof(RegionInfo *)) %
+                  SCUDO_CACHE_LINE_SIZE)] = {};
   };
   static_assert(sizeof(RegionInfo) % SCUDO_CACHE_LINE_SIZE == 0, "");
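
A quick worked example of the padding arithmetic above (the concrete numbers are only illustrative; SCUDO_CACHE_LINE_SIZE is commonly 64): if sizeof(UnpaddedRegionInfo) + sizeof(RegionInfo *) came to 200 bytes, Padding would be 64 - (200 % 64) = 56 bytes, so sizeof(RegionInfo) == 256, a multiple of the cache line as the static_assert requires. If the summed size is already a multiple of 64, the formula yields a full 64-byte pad rather than zero, which still satisfies the assertion.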
 
-  RegionInfo *getRegionInfo(uptr ClassId) {
-    DCHECK_LT(ClassId, NumClasses);
-    return &RegionInfoArray[ClassId];
-  }
+  template <bool IsMultiRegions = false> struct RegionInfoAlloc {
+    RegionInfo *allocate() {
+      UNREACHABLE("RegionInfo is statically allocated");
+    }
 
-  uptr getRegionBaseByClassId(uptr ClassId) {
-    RegionInfo *Region = getRegionInfo(ClassId);
+    void verifyTheNumberOfAllocatedRegionInfo(uptr NumRegionInfo) {
+      DCHECK_EQ(NumRegionInfo, NumClasses);
+    }
+  };
+
+  template <> struct RegionInfoAlloc</*IsMultiRegions=*/true> {
+    RegionInfo *allocate() {
+      ScopedLock L(M);
+      return S.pop();
+    }
+
+    void verifyTheNumberOfAllocatedRegionInfo(uptr NumRegionInfo) {
+      ScopedLock L(M);
+      DCHECK_EQ(NumRegionInfo, S.Size);
+    }
+
+    HybridMutex M;
+    // According to DR1351:
+    //   If the brace-or-equal-initializer of a non-static data member
+    //   invokes a defaulted default constructor of its class or of an
+    //   enclosing class in a potentially evaluated subexpression, the
+    //   program is ill-formed.
+    // Therefore, `Size` and `Array` are outlined into a separate struct
+    // `Storage` instead of being direct members of this class.
+    struct Storage {
+      RegionInfo *pop() {
+        if (Size == NumEntries)
+          return nullptr;
+        return &Array[Size++];
+      }
+      // The amount of memory used by this allocator is about (NumEntries *
+      // RegionSize). For example, with a 256 KB region size, 2 GB of space
+      // will be available.
+      // TODO(chiahungduan): Consider having this configurable.
+      static constexpr uptr NumEntries = 1UL << 13;
+      uptr Size = 0;
+      alignas(SCUDO_CACHE_LINE_SIZE) RegionInfo Array[NumEntries];
+    } S GUARDED_BY(M);
+  };
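
To make the numbers in the comment above concrete (this only restates the comment's own example): NumEntries = 1UL << 13 = 8192, so with a 256 KB region size the entries cover 8192 * 256 KB = 2 GB of region space for one size class, while the Array member itself occupies 8192 * sizeof(RegionInfo) bytes up front.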
+
+  template <bool IsMultiRegions = false> struct RegionInfoInterface {
+    struct RegionInfoIter {
+      RegionInfoIter(RegionInfo *Region) : CurRegionInfo(Region) {}
+      RegionInfo *operator->() { return CurRegionInfo; }
+      RegionInfoIter &operator++() {
+        CurRegionInfo = nullptr;
+        return *this;
+      }
+      RegionInfo *get() { return CurRegionInfo; }
+      bool end() { return CurRegionInfo == nullptr; }
+      RegionInfo *CurRegionInfo = nullptr;
+    };
+
+    void init(UNUSED RegionInfoAlloc<IsMultiRegions> &Allocator) {
+      // The RegionInfo storage is statically initialized.
+    }
+
+    ALWAYS_INLINE RegionInfo *getCurRegionInfo(uptr ClassId) {
+      DCHECK_LT(ClassId, NumClasses);
+      return &RegionInfoArray[ClassId];
+    }
+    ALWAYS_INLINE RegionInfoIter getRegionInfoIter(uptr ClassId) {
+      return RegionInfoIter(getCurRegionInfo(ClassId));
+    }
+
+    void pushRegionInfo(UNUSED RegionInfo *Region, UNUSED uptr ClassId) {
+      UNREACHABLE("Only MultiRegions supports this operation\n");
+    }
+    // TODO: Review the uses of `getRawRegionInfoArray` and
+    // `getRawRegionInfoArraySize` and see if we can deprecate them.
+    RegionInfo *getRawRegionInfoArray() { return RegionInfoArray; }
+    uptr getRawRegionInfoArraySize() {
+      return static_cast<uptr>(sizeof(RegionInfoArray));
+    }
+    void shuffle(u32 *Seed) {
+      scudo::shuffle(RegionInfoArray, NumClasses, Seed);
+    }
+
+    alignas(SCUDO_CACHE_LINE_SIZE) RegionInfo RegionInfoArray[NumClasses];
+  };
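
As a rough usage sketch (not part of the patch; RegionInfoMgr, ClassId and the loop body are made up for illustration), both specializations are meant to be walked the same way through RegionInfoIter: the single-region variant above stops after one step, while the multi-region variant below follows the Next links under the per-class RegionInfoLock.

  // Hypothetical caller inside SizeClassAllocator64, where RegionInfoMgr is
  // an illustrative member of type RegionInfoInterface<...>.
  for (auto Iter = RegionInfoMgr.getRegionInfoIter(ClassId); !Iter.end();
       ++Iter) {
    RegionInfo *Region = Iter.get();
    // ... operate on Region, e.g. collect stats or release pages ...
  }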
+
+  template <> struct RegionInfoInterface</*IsMultiRegions=*/true> {
+    struct RegionInfoIter {
+      RegionInfoIter(RegionInfo *Region, HybridMutex &RegionInfoListLock)
+          : CurRegionInfo(Region), M(RegionInfoListLock) {}
+      RegionInfo *operator->() { return CurRegionInfo; }
+      RegionInfoIter &operator++() {
+        ScopedLock L(M);
+        CurRegionInfo = CurRegionInfo->Next;
+        return *this;
+      }
+      RegionInfo *get() { return CurRegionInfo; }
+      bool end() { return CurRegionInfo == nullptr; }
+      RegionInfo *CurRegionInfo = nullptr;
+      HybridMutex &M;
+    };
+
+    void init(RegionInfoAlloc</*IsMultiRegions=*/true> &Allocator) {
+      for (uptr I = 0; I < NumClasses; I++) {
+        RegionInfo *Region = Allocator.allocate();
+        LowestAddrRegionInfo[I].P = Region;
+        CurrentRegionInfo[I].P = Region;
+      }
+    }
+
+    // Return the most recently pushed RegionInfo. For a given size class,
+    // only the current RegionInfo performs new page mappings; all other
+    // RegionInfos for that class have already been exhausted.
+    ALWAYS_INLINE RegionInfo *getCurRegionInfo(uptr ClassId) {
+      DCHECK_LT(ClassId, NumClasses);
+      return CurrentRegionInfo[ClassId].P;
+    }
+
+    ALWAYS_INLINE RegionInfoIter getRegionInfoIter(uptr ClassId) {
+      return RegionInfoIter(LowestAddrRegionInfo[ClassId].P,
+                            RegionInfoLock[ClassId]);
+    }
+
+    // RegionInfos for the same size class will be stored in the order of base
----------------
cferris1000 wrote:

"stored in the order of base address" -> "ordered by base address"

https://github.com/llvm/llvm-project/pull/98076

