[compiler-rt] 1e9c909 - Revert "[sanitizer] Support dynamic premapped R/W range in primary allocator."

Vitaly Buka via llvm-commits <llvm-commits at lists.llvm.org>
Mon Mar 22 18:53:08 PDT 2021


Author: Vitaly Buka
Date: 2021-03-22T18:52:56-07:00
New Revision: 1e9c90921fb7ad5c6b28bcc54f5de1cfcf8003d4

URL: https://github.com/llvm/llvm-project/commit/1e9c90921fb7ad5c6b28bcc54f5de1cfcf8003d4
DIFF: https://github.com/llvm/llvm-project/commit/1e9c90921fb7ad5c6b28bcc54f5de1cfcf8003d4.diff

LOG: Revert "[sanitizer] Support dynamic premapped R/W range in primary allocator."

Reverting because the change fails on the Windows and Android buildbots:
Windows: https://lab.llvm.org/buildbot/#/builders/127/builds/7999
Android: https://lab.llvm.org/buildbot/#/builders/77/builds/4839

This reverts commit bca0cf768b6021124f5e5315be333c2f45f14fca.
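
For context: the reverted patch let the 64-bit primary allocator be placed into a
caller-supplied region that was already mapped read/write, instead of reserving its
own space. The removed unit tests exercised it roughly as in the sketch below. This
is only an illustration of the now-reverted interface, modeled on the removed
ScopedPremappedHeap test helper; the names (uptr, MmapNoReserveOrDie, UnmapOrDie,
RoundUpTo, kAllocatorSize, kReleaseToOSIntervalNever, Allocator64Dynamic) are the
ones used by the sanitizer_common unit tests, not new API.

    // Minimal sketch of the reverted usage, modeled on the removed test helper.
    // Assumes sanitizer_common internals: uptr, MmapNoReserveOrDie, UnmapOrDie,
    // RoundUpTo, kAllocatorSize, kReleaseToOSIntervalNever, Allocator64Dynamic.
    class ScopedPremappedHeap {
     public:
      ScopedPremappedHeap() {
        // Over-reserve so a kAllocatorSize-aligned start can be carved out.
        base_ = MmapNoReserveOrDie(2 * kAllocatorSize, "preallocated heap");
        aligned_ = RoundUpTo(reinterpret_cast<uptr>(base_), kAllocatorSize);
      }
      ~ScopedPremappedHeap() { UnmapOrDie(base_, 2 * kAllocatorSize); }
      uptr Addr() const { return aligned_; }

     private:
      void *base_;
      uptr aligned_;
    };

    // Before this revert (heap_start defaulted to 0, i.e. "not premapped"):
    //   ScopedPremappedHeap h;
    //   Allocator64Dynamic *a = new Allocator64Dynamic;
    //   a->Init(kReleaseToOSIntervalNever, h.Addr());
    // After this revert the extra parameter is gone again:
    //   a->Init(kReleaseToOSIntervalNever);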

Added: 
    

Modified: 
    compiler-rt/lib/sanitizer_common/sanitizer_allocator_combined.h
    compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h
    compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h
    compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp

Removed: 
    


################################################################################
diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_combined.h b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_combined.h
index eb836bc47876..33f89d6d4992 100644
--- a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_combined.h
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_combined.h
@@ -35,9 +35,9 @@ class CombinedAllocator {
     secondary_.InitLinkerInitialized();
   }
 
-  void Init(s32 release_to_os_interval_ms, uptr heap_start = 0) {
+  void Init(s32 release_to_os_interval_ms) {
     stats_.Init();
-    primary_.Init(release_to_os_interval_ms, heap_start);
+    primary_.Init(release_to_os_interval_ms);
     secondary_.Init();
   }
 

diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h
index fb5394cd39c4..b90dabbf7769 100644
--- a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h
@@ -119,8 +119,7 @@ class SizeClassAllocator32 {
   typedef SizeClassAllocator32<Params> ThisT;
   typedef SizeClassAllocator32LocalCache<ThisT> AllocatorCache;
 
-  void Init(s32 release_to_os_interval_ms, uptr heap_start = 0) {
-    CHECK(!heap_start);
+  void Init(s32 release_to_os_interval_ms) {
     possible_regions.Init();
     internal_memset(size_class_info_array, 0, sizeof(size_class_info_array));
   }

diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h
index db30e138154a..26753b6c8aeb 100644
--- a/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h
+++ b/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h
@@ -69,45 +69,25 @@ class SizeClassAllocator64 {
     return base + (static_cast<uptr>(ptr32) << kCompactPtrScale);
   }
 
-  // If heap_start is nonzero, assumes kSpaceSize bytes are already mapped R/W
-  // at heap_start and places the heap there.  This mode requires kSpaceBeg ==
-  // ~(uptr)0.
-  void Init(s32 release_to_os_interval_ms, uptr heap_start = 0) {
+  void Init(s32 release_to_os_interval_ms) {
     uptr TotalSpaceSize = kSpaceSize + AdditionalSize();
-    PremappedHeap = heap_start != 0;
-    if (PremappedHeap) {
-      CHECK(!kUsingConstantSpaceBeg);
-      NonConstSpaceBeg = heap_start;
-      uptr RegionInfoSize = AdditionalSize();
-      RegionInfoSpace =
-          address_range.Init(RegionInfoSize, PrimaryAllocatorName);
-      CHECK_NE(RegionInfoSpace, ~(uptr)0);
-      CHECK_EQ(RegionInfoSpace,
-               address_range.MapOrDie(RegionInfoSpace, RegionInfoSize,
-                                      "SizeClassAllocator: region info"));
-      MapUnmapCallback().OnMap(RegionInfoSpace, RegionInfoSize);
+    if (kUsingConstantSpaceBeg) {
+      CHECK(IsAligned(kSpaceBeg, SizeClassMap::kMaxSize));
+      CHECK_EQ(kSpaceBeg, address_range.Init(TotalSpaceSize,
+                                             PrimaryAllocatorName, kSpaceBeg));
     } else {
-      if (kUsingConstantSpaceBeg) {
-        CHECK(IsAligned(kSpaceBeg, SizeClassMap::kMaxSize));
-        CHECK_EQ(kSpaceBeg,
-                 address_range.Init(TotalSpaceSize, PrimaryAllocatorName,
-                                    kSpaceBeg));
-      } else {
-        // Combined allocator expects that an 2^N allocation is always aligned
-        // to 2^N. For this to work, the start of the space needs to be aligned
-        // as high as the largest size class (which also needs to be a power of
-        // 2).
-        NonConstSpaceBeg = address_range.InitAligned(
-            TotalSpaceSize, SizeClassMap::kMaxSize, PrimaryAllocatorName);
-        CHECK_NE(NonConstSpaceBeg, ~(uptr)0);
-      }
-      RegionInfoSpace = SpaceEnd();
-      MapWithCallbackOrDie(RegionInfoSpace, AdditionalSize(),
-                           "SizeClassAllocator: region info");
+      // Combined allocator expects that an 2^N allocation is always aligned to
+      // 2^N. For this to work, the start of the space needs to be aligned as
+      // high as the largest size class (which also needs to be a power of 2).
+      NonConstSpaceBeg = address_range.InitAligned(
+          TotalSpaceSize, SizeClassMap::kMaxSize, PrimaryAllocatorName);
+      CHECK_NE(NonConstSpaceBeg, ~(uptr)0);
     }
     SetReleaseToOSIntervalMs(release_to_os_interval_ms);
+    MapWithCallbackOrDie(SpaceEnd(), AdditionalSize(),
+                         "SizeClassAllocator: region info");
     // Check that the RegionInfo array is aligned on the CacheLine size.
-    DCHECK_EQ(RegionInfoSpace % kCacheLineSize, 0);
+    DCHECK_EQ(SpaceEnd() % kCacheLineSize, 0);
   }
 
   s32 ReleaseToOSIntervalMs() const {
@@ -616,11 +596,6 @@ class SizeClassAllocator64 {
 
   atomic_sint32_t release_to_os_interval_ms_;
 
-  uptr RegionInfoSpace;
-
-  // True if the user has already mapped the entire heap R/W.
-  bool PremappedHeap;
-
   struct Stats {
     uptr n_allocated;
     uptr n_freed;
@@ -650,7 +625,7 @@ class SizeClassAllocator64 {
 
   RegionInfo *GetRegionInfo(uptr class_id) const {
     DCHECK_LT(class_id, kNumClasses);
-    RegionInfo *regions = reinterpret_cast<RegionInfo *>(RegionInfoSpace);
+    RegionInfo *regions = reinterpret_cast<RegionInfo *>(SpaceEnd());
     return &regions[class_id];
   }
 
@@ -675,9 +650,6 @@ class SizeClassAllocator64 {
   }
 
   bool MapWithCallback(uptr beg, uptr size, const char *name) {
-    if (PremappedHeap)
-      return beg >= NonConstSpaceBeg &&
-             beg + size <= NonConstSpaceBeg + kSpaceSize;
     uptr mapped = address_range.Map(beg, size, name);
     if (UNLIKELY(!mapped))
       return false;
@@ -687,18 +659,11 @@ class SizeClassAllocator64 {
   }
 
   void MapWithCallbackOrDie(uptr beg, uptr size, const char *name) {
-    if (PremappedHeap) {
-      CHECK_GE(beg, NonConstSpaceBeg);
-      CHECK_LE(beg + size, NonConstSpaceBeg + kSpaceSize);
-      return;
-    }
     CHECK_EQ(beg, address_range.MapOrDie(beg, size, name));
     MapUnmapCallback().OnMap(beg, size);
   }
 
   void UnmapWithCallbackOrDie(uptr beg, uptr size) {
-    if (PremappedHeap)
-      return;
     MapUnmapCallback().OnUnmap(beg, size);
     address_range.Unmap(beg, size);
   }
@@ -867,9 +832,6 @@ class SizeClassAllocator64 {
 
   // Attempts to release RAM occupied by freed chunks back to OS. The region is
   // expected to be locked.
-  //
-  // TODO(morehouse): Support a callback on memory release so HWASan can release
-  // aliases as well.
   void MaybeReleaseToOS(uptr class_id, bool force) {
     RegionInfo *region = GetRegionInfo(class_id);
     const uptr chunk_size = ClassIdToSize(class_id);

diff --git a/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp b/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp
index 7c95b785987e..590e477678ea 100644
--- a/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp
+++ b/compiler-rt/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp
@@ -196,9 +196,9 @@ TEST(SanitizerCommon, DenseSizeClassMap) {
 }
 
 template <class Allocator>
-void TestSizeClassAllocator(uptr premapped_heap = 0) {
+void TestSizeClassAllocator() {
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever, premapped_heap);
+  a->Init(kReleaseToOSIntervalNever);
   typename Allocator::AllocatorCache cache;
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
@@ -265,25 +265,6 @@ void TestSizeClassAllocator(uptr premapped_heap = 0) {
 }
 
 #if SANITIZER_CAN_USE_ALLOCATOR64
-
-// Allocates kAllocatorSize aligned bytes on construction and frees it on
-// destruction.
-class ScopedPremappedHeap {
- public:
-  ScopedPremappedHeap() {
-    BasePtr = MmapNoReserveOrDie(2 * kAllocatorSize, "preallocated heap");
-    AlignedAddr = RoundUpTo(reinterpret_cast<uptr>(BasePtr), kAllocatorSize);
-  }
-
-  ~ScopedPremappedHeap() { UnmapOrDie(BasePtr, kAllocatorSize); }
-
-  uptr Addr() { return AlignedAddr; }
-
- private:
-  void *BasePtr;
-  uptr AlignedAddr;
-};
-
 // These tests can fail on Windows if memory is somewhat full and lit happens
 // to run them all at the same time. FIXME: Make them not flaky and reenable.
 #if !SANITIZER_WINDOWS
@@ -295,11 +276,6 @@ TEST(SanitizerCommon, SizeClassAllocator64Dynamic) {
   TestSizeClassAllocator<Allocator64Dynamic>();
 }
 
-TEST(SanitizerCommon, SizeClassAllocator64DynamicPremapped) {
-  ScopedPremappedHeap h;
-  TestSizeClassAllocator<Allocator64Dynamic>(h.Addr());
-}
-
 #if !SANITIZER_ANDROID
 //FIXME(kostyak): find values so that those work on Android as well.
 TEST(SanitizerCommon, SizeClassAllocator64Compact) {
@@ -344,9 +320,9 @@ TEST(SanitizerCommon, SizeClassAllocator32SeparateBatches) {
 }
 
 template <class Allocator>
-void SizeClassAllocatorMetadataStress(uptr premapped_heap = 0) {
+void SizeClassAllocatorMetadataStress() {
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever, premapped_heap);
+  a->Init(kReleaseToOSIntervalNever);
   typename Allocator::AllocatorCache cache;
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
@@ -385,11 +361,6 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicMetadataStress) {
   SizeClassAllocatorMetadataStress<Allocator64Dynamic>();
 }
 
-TEST(SanitizerCommon, SizeClassAllocator64DynamicPremappedMetadataStress) {
-  ScopedPremappedHeap h;
-  SizeClassAllocatorMetadataStress<Allocator64Dynamic>(h.Addr());
-}
-
 #if !SANITIZER_ANDROID
 TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
   SizeClassAllocatorMetadataStress<Allocator64Compact>();
@@ -403,10 +374,9 @@ TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
 }
 
 template <class Allocator>
-void SizeClassAllocatorGetBlockBeginStress(u64 TotalSize,
-                                           uptr premapped_heap = 0) {
+void SizeClassAllocatorGetBlockBeginStress(u64 TotalSize) {
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever, premapped_heap);
+  a->Init(kReleaseToOSIntervalNever);
   typename Allocator::AllocatorCache cache;
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
@@ -438,11 +408,6 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicGetBlockBegin) {
   SizeClassAllocatorGetBlockBeginStress<Allocator64Dynamic>(
       1ULL << (SANITIZER_ANDROID ? 31 : 33));
 }
-TEST(SanitizerCommon, SizeClassAllocator64DynamicPremappedGetBlockBegin) {
-  ScopedPremappedHeap h;
-  SizeClassAllocatorGetBlockBeginStress<Allocator64Dynamic>(
-      1ULL << (SANITIZER_ANDROID ? 31 : 33), h.Addr());
-}
 #if !SANITIZER_ANDROID
 TEST(SanitizerCommon, SizeClassAllocator64CompactGetBlockBegin) {
   SizeClassAllocatorGetBlockBeginStress<Allocator64Compact>(1ULL << 33);
@@ -659,10 +624,10 @@ TEST(SanitizerCommon, LargeMmapAllocator) {
 }
 
 template <class PrimaryAllocator>
-void TestCombinedAllocator(uptr premapped_heap = 0) {
+void TestCombinedAllocator() {
   typedef CombinedAllocator<PrimaryAllocator> Allocator;
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever, premapped_heap);
+  a->Init(kReleaseToOSIntervalNever);
   std::mt19937 r;
 
   typename Allocator::AllocatorCache cache;
@@ -733,11 +698,6 @@ TEST(SanitizerCommon, CombinedAllocator64Dynamic) {
   TestCombinedAllocator<Allocator64Dynamic>();
 }
 
-TEST(SanitizerCommon, CombinedAllocator64DynamicPremapped) {
-  ScopedPremappedHeap h;
-  TestCombinedAllocator<Allocator64Dynamic>(h.Addr());
-}
-
 #if !SANITIZER_ANDROID
 TEST(SanitizerCommon, CombinedAllocator64Compact) {
   TestCombinedAllocator<Allocator64Compact>();
@@ -754,12 +714,12 @@ TEST(SanitizerCommon, SKIP_ON_SOLARIS_SPARCV9(CombinedAllocator32Compact)) {
 }
 
 template <class Allocator>
-void TestSizeClassAllocatorLocalCache(uptr premapped_heap = 0) {
+void TestSizeClassAllocatorLocalCache() {
   using AllocatorCache = typename Allocator::AllocatorCache;
   AllocatorCache cache;
   Allocator *a = new Allocator();
 
-  a->Init(kReleaseToOSIntervalNever, premapped_heap);
+  a->Init(kReleaseToOSIntervalNever);
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
 
@@ -799,11 +759,6 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicLocalCache) {
   TestSizeClassAllocatorLocalCache<Allocator64Dynamic>();
 }
 
-TEST(SanitizerCommon, SizeClassAllocator64DynamicPremappedLocalCache) {
-  ScopedPremappedHeap h;
-  TestSizeClassAllocatorLocalCache<Allocator64Dynamic>(h.Addr());
-}
-
 #if !SANITIZER_ANDROID
 TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
   TestSizeClassAllocatorLocalCache<Allocator64Compact>();
@@ -936,9 +891,9 @@ void IterationTestCallback(uptr chunk, void *arg) {
 }
 
 template <class Allocator>
-void TestSizeClassAllocatorIteration(uptr premapped_heap = 0) {
+void TestSizeClassAllocatorIteration() {
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever, premapped_heap);
+  a->Init(kReleaseToOSIntervalNever);
   typename Allocator::AllocatorCache cache;
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
@@ -987,10 +942,6 @@ TEST(SanitizerCommon, SizeClassAllocator64Iteration) {
 TEST(SanitizerCommon, SizeClassAllocator64DynamicIteration) {
   TestSizeClassAllocatorIteration<Allocator64Dynamic>();
 }
-TEST(SanitizerCommon, SizeClassAllocator64DynamicPremappedIteration) {
-  ScopedPremappedHeap h;
-  TestSizeClassAllocatorIteration<Allocator64Dynamic>(h.Addr());
-}
 #endif
 #endif
 


        

