[compiler-rt] r369441 - [AArch64] Speed-up leak and address sanitizers on AArch64 for 48-bit VMA

Sebastian Pop via llvm-commits llvm-commits at lists.llvm.org
Tue Aug 20 13:54:06 PDT 2019


Author: spop
Date: Tue Aug 20 13:54:05 2019
New Revision: 369441

URL: http://llvm.org/viewvc/llvm-project?rev=369441&view=rev
Log:
[AArch64] Speed-up leak and address sanitizers on AArch64 for 48-bit VMA

This patch fixes https://github.com/google/sanitizers/issues/703
On a Graviton A1 aarch64 machine with a 48-bit VMA,
the time spent in LSan and ASan dropped from 2.5s to 0.01s when running

clang -fsanitize=leak compiler-rt/test/lsan/TestCases/sanity_check_pure_c.c && time ./a.out
clang -fsanitize=address compiler-rt/test/lsan/TestCases/sanity_check_pure_c.c && time ./a.out

With this patch, LSan and ASan create both the 32-bit and 64-bit allocators and
select between the two at run time based on a global variable that is
initialized at startup to indicate whether the 64-bit allocator can be used in
the available virtual address space.

Differential Revision: https://reviews.llvm.org/D60243

Added:
    compiler-rt/trunk/lib/sanitizer_common/sanitizer_runtime_select_allocator.h
Modified:
    compiler-rt/trunk/lib/asan/asan_allocator.cpp
    compiler-rt/trunk/lib/asan/asan_allocator.h
    compiler-rt/trunk/lib/asan/asan_stats.cpp
    compiler-rt/trunk/lib/asan/asan_stats.h
    compiler-rt/trunk/lib/lsan/lsan_allocator.h
    compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.h
    compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_combined.h
    compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary32.h
    compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary64.h
    compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp

Modified: compiler-rt/trunk/lib/asan/asan_allocator.cpp
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_allocator.cpp?rev=369441&r1=369440&r2=369441&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_allocator.cpp (original)
+++ compiler-rt/trunk/lib/asan/asan_allocator.cpp Tue Aug 20 13:54:05 2019
@@ -48,8 +48,6 @@ static u32 RZSize2Log(u32 rz_size) {
   return res;
 }
 
-static AsanAllocator &get_allocator();
-
 // The memory chunk allocated from the underlying allocator looks like this:
 // L L L L L L H H U U U U U U R R
 //   L -- left redzone words (0 or more bytes)
@@ -113,7 +111,7 @@ enum {
 struct AsanChunk: ChunkBase {
   uptr Beg() { return reinterpret_cast<uptr>(this) + kChunkHeaderSize; }
   uptr UsedSize(bool locked_version = false) {
-    if (user_requested_size != SizeClassMap::kMaxSize)
+    if (user_requested_size != get_allocator().KMaxSize())
       return user_requested_size;
     return *reinterpret_cast<uptr *>(
                get_allocator().GetMetaData(AllocBeg(locked_version)));
@@ -430,7 +428,7 @@ struct Allocator {
     bool using_primary_allocator = true;
     // If we are allocating from the secondary allocator, there will be no
     // automatic right redzone, so add the right redzone manually.
-    if (!PrimaryAllocator::CanAllocate(needed_size, alignment)) {
+    if (!get_allocator().CanAllocate(needed_size, alignment)) {
       needed_size += rz_size;
       using_primary_allocator = false;
     }
@@ -499,7 +497,7 @@ struct Allocator {
       CHECK(allocator.FromPrimary(allocated));
     } else {
       CHECK(!allocator.FromPrimary(allocated));
-      m->user_requested_size = SizeClassMap::kMaxSize;
+      m->user_requested_size = get_allocator().KMaxSize();
       uptr *meta = reinterpret_cast<uptr *>(allocator.GetMetaData(allocated));
       meta[0] = size;
       meta[1] = chunk_beg;
@@ -524,10 +522,10 @@ struct Allocator {
     thread_stats.mallocs++;
     thread_stats.malloced += size;
     thread_stats.malloced_redzones += needed_size - size;
-    if (needed_size > SizeClassMap::kMaxSize)
+    if (needed_size > get_allocator().KMaxSize())
       thread_stats.malloc_large++;
     else
-      thread_stats.malloced_by_size[SizeClassMap::ClassID(needed_size)]++;
+      thread_stats.malloced_by_size[get_allocator().ClassID(needed_size)]++;
 
     void *res = reinterpret_cast<void *>(user_beg);
     if (can_fill && fl.max_malloc_fill_size) {
@@ -791,7 +789,7 @@ struct Allocator {
 
 static Allocator instance(LINKER_INITIALIZED);
 
-static AsanAllocator &get_allocator() {
+AsanAllocator &get_allocator() {
   return instance.allocator;
 }
 

Modified: compiler-rt/trunk/lib/asan/asan_allocator.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_allocator.h?rev=369441&r1=369440&r2=369441&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_allocator.h (original)
+++ compiler-rt/trunk/lib/asan/asan_allocator.h Tue Aug 20 13:54:05 2019
@@ -118,39 +118,76 @@ struct AsanMapUnmapCallback {
   void OnUnmap(uptr p, uptr size) const;
 };
 
-#if SANITIZER_CAN_USE_ALLOCATOR64
+#if defined(__aarch64__)
+// AArch64 supports 39, 42 and 48-bit VMA.
+const uptr kAllocatorSpace = ~(uptr)0;
+#if SANITIZER_ANDROID
+const uptr kAllocatorSize = 0x2000000000ULL;  // 128G.
+typedef VeryCompactSizeClassMap SizeClassMap64;
+#else
+const uptr kAllocatorSize = 0x40000000000ULL;  // 4T.
+typedef DefaultSizeClassMap SizeClassMap64;
+#endif
+
+template <typename AddressSpaceViewTy>
+struct AP64 {  // Allocator64 parameters. Deliberately using a short name.
+  static const uptr kSpaceBeg = kAllocatorSpace;
+  static const uptr kSpaceSize = kAllocatorSize;
+  static const uptr kMetadataSize = 0;
+  typedef __asan::SizeClassMap64 SizeClassMap;
+  typedef AsanMapUnmapCallback MapUnmapCallback;
+  static const uptr kFlags = 0;
+  using AddressSpaceView = AddressSpaceViewTy;
+};
+template <typename AddressSpaceView>
+using Allocator64ASVT = SizeClassAllocator64<AP64<AddressSpaceView>>;
+using Allocator64 = Allocator64ASVT<LocalAddressSpaceView>;
+
+typedef CompactSizeClassMap SizeClassMap32;
+template <typename AddressSpaceViewTy>
+struct AP32 {
+  static const uptr kSpaceBeg = 0;
+  static const u64 kSpaceSize = SANITIZER_MMAP_RANGE_SIZE;
+  static const uptr kMetadataSize = 16;
+  typedef __asan::SizeClassMap32 SizeClassMap;
+  static const uptr kRegionSizeLog = 20;
+  using AddressSpaceView = AddressSpaceViewTy;
+  typedef AsanMapUnmapCallback MapUnmapCallback;
+  static const uptr kFlags = 0;
+};
+template <typename AddressSpaceView>
+using Allocator32ASVT = SizeClassAllocator32<AP32<AddressSpaceView>>;
+using Allocator32 = Allocator32ASVT<LocalAddressSpaceView>;
+using Allocator32or64 = RuntimeSelectAllocator<Allocator32, Allocator64>;
+
+static const uptr kMaxNumberOfSizeClasses =
+    SizeClassMap32::kNumClasses < SizeClassMap64::kNumClasses
+        ? SizeClassMap64::kNumClasses
+        : SizeClassMap32::kNumClasses;
+
+template <typename AddressSpaceView>
+using PrimaryAllocatorASVT =
+    RuntimeSelectAllocator<Allocator32ASVT<AddressSpaceView>,
+                           Allocator64ASVT<AddressSpaceView>>;
+#elif SANITIZER_CAN_USE_ALLOCATOR64
 # if SANITIZER_FUCHSIA
 const uptr kAllocatorSpace = ~(uptr)0;
 const uptr kAllocatorSize  =  0x40000000000ULL;  // 4T.
-typedef DefaultSizeClassMap SizeClassMap;
 # elif defined(__powerpc64__)
 const uptr kAllocatorSpace = ~(uptr)0;
 const uptr kAllocatorSize  =  0x20000000000ULL;  // 2T.
-typedef DefaultSizeClassMap SizeClassMap;
-# elif defined(__aarch64__) && SANITIZER_ANDROID
-// Android needs to support 39, 42 and 48 bit VMA.
-const uptr kAllocatorSpace =  ~(uptr)0;
-const uptr kAllocatorSize  =  0x2000000000ULL;  // 128G.
-typedef VeryCompactSizeClassMap SizeClassMap;
-# elif defined(__aarch64__)
-// AArch64/SANITIZER_CAN_USE_ALLOCATOR64 is only for 42-bit VMA
-// so no need to different values for different VMA.
-const uptr kAllocatorSpace =  0x10000000000ULL;
-const uptr kAllocatorSize  =  0x10000000000ULL;  // 3T.
-typedef DefaultSizeClassMap SizeClassMap;
-#elif defined(__sparc__)
+# elif defined(__sparc__)
 const uptr kAllocatorSpace = ~(uptr)0;
 const uptr kAllocatorSize = 0x20000000000ULL;  // 2T.
-typedef DefaultSizeClassMap SizeClassMap;
 # elif SANITIZER_WINDOWS
 const uptr kAllocatorSpace = ~(uptr)0;
 const uptr kAllocatorSize  =  0x8000000000ULL;  // 500G
-typedef DefaultSizeClassMap SizeClassMap;
 # else
 const uptr kAllocatorSpace = 0x600000000000ULL;
 const uptr kAllocatorSize  =  0x40000000000ULL;  // 4T.
-typedef DefaultSizeClassMap SizeClassMap;
 # endif
+typedef DefaultSizeClassMap SizeClassMap;
+static const uptr kMaxNumberOfSizeClasses = SizeClassMap::kNumClasses;
 template <typename AddressSpaceViewTy>
 struct AP64 {  // Allocator64 parameters. Deliberately using a short name.
   static const uptr kSpaceBeg = kAllocatorSpace;
@@ -164,9 +201,9 @@ struct AP64 {  // Allocator64 parameters
 
 template <typename AddressSpaceView>
 using PrimaryAllocatorASVT = SizeClassAllocator64<AP64<AddressSpaceView>>;
-using PrimaryAllocator = PrimaryAllocatorASVT<LocalAddressSpaceView>;
 #else  // Fallback to SizeClassAllocator32.
 typedef CompactSizeClassMap SizeClassMap;
+static const uptr kMaxNumberOfSizeClasses = SizeClassMap::kNumClasses;
 template <typename AddressSpaceViewTy>
 struct AP32 {
   static const uptr kSpaceBeg = 0;
@@ -180,16 +217,14 @@ struct AP32 {
 };
 template <typename AddressSpaceView>
 using PrimaryAllocatorASVT = SizeClassAllocator32<AP32<AddressSpaceView> >;
-using PrimaryAllocator = PrimaryAllocatorASVT<LocalAddressSpaceView>;
 #endif  // SANITIZER_CAN_USE_ALLOCATOR64
 
-static const uptr kNumberOfSizeClasses = SizeClassMap::kNumClasses;
-
 template <typename AddressSpaceView>
 using AsanAllocatorASVT =
     CombinedAllocator<PrimaryAllocatorASVT<AddressSpaceView>>;
 using AsanAllocator = AsanAllocatorASVT<LocalAddressSpaceView>;
 using AllocatorCache = AsanAllocator::AllocatorCache;
+using PrimaryAllocator = PrimaryAllocatorASVT<LocalAddressSpaceView>;
 
 struct AsanThreadLocalMallocStorage {
   uptr quarantine_cache[16];
@@ -226,5 +261,7 @@ void asan_mz_force_unlock();
 void PrintInternalAllocatorStats();
 void AsanSoftRssLimitExceededCallback(bool exceeded);
 
+AsanAllocator &get_allocator();
+
 }  // namespace __asan
 #endif  // ASAN_ALLOCATOR_H

Modified: compiler-rt/trunk/lib/asan/asan_stats.cpp
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_stats.cpp?rev=369441&r1=369440&r2=369441&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_stats.cpp (original)
+++ compiler-rt/trunk/lib/asan/asan_stats.cpp Tue Aug 20 13:54:05 2019
@@ -10,6 +10,7 @@
 //
 // Code related to statistics collected by AddressSanitizer.
 //===----------------------------------------------------------------------===//
+#include "asan_allocator.h"
 #include "asan_interceptors.h"
 #include "asan_internal.h"
 #include "asan_stats.h"
@@ -30,9 +31,9 @@ void AsanStats::Clear() {
 }
 
 static void PrintMallocStatsArray(const char *prefix,
-                                  uptr (&array)[kNumberOfSizeClasses]) {
+                                  uptr *array, uptr size) {
   Printf("%s", prefix);
-  for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
+  for (uptr i = 0; i < size; i++) {
     if (!array[i]) continue;
     Printf("%zu:%zu; ", i, array[i]);
   }
@@ -50,7 +51,8 @@ void AsanStats::Print() {
              (mmaped-munmaped)>>20, mmaped>>20, munmaped>>20,
              mmaps, munmaps);
 
-  PrintMallocStatsArray("  mallocs by size class: ", malloced_by_size);
+  PrintMallocStatsArray("  mallocs by size class: ", malloced_by_size,
+                        get_allocator().KMaxSize());
   Printf("Stats: malloc large: %zu\n", malloc_large);
 }
 

Modified: compiler-rt/trunk/lib/asan/asan_stats.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_stats.h?rev=369441&r1=369440&r2=369441&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_stats.h (original)
+++ compiler-rt/trunk/lib/asan/asan_stats.h Tue Aug 20 13:54:05 2019
@@ -38,7 +38,7 @@ struct AsanStats {
   uptr munmaps;
   uptr munmaped;
   uptr malloc_large;
-  uptr malloced_by_size[kNumberOfSizeClasses];
+  uptr malloced_by_size[kMaxNumberOfSizeClasses];
 
   // Ctor for global AsanStats (accumulated stats for dead threads).
   explicit AsanStats(LinkerInitialized) { }

Modified: compiler-rt/trunk/lib/lsan/lsan_allocator.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/lsan/lsan_allocator.h?rev=369441&r1=369440&r2=369441&view=diff
==============================================================================
--- compiler-rt/trunk/lib/lsan/lsan_allocator.h (original)
+++ compiler-rt/trunk/lib/lsan/lsan_allocator.h Tue Aug 20 13:54:05 2019
@@ -49,8 +49,46 @@ struct ChunkMetadata {
   u32 stack_trace_id;
 };
 
-#if defined(__mips64) || defined(__aarch64__) || defined(__i386__) || \
-    defined(__arm__)
+#if defined(__aarch64__)
+template <typename AddressSpaceViewTy>
+struct AP32 {
+  static const uptr kSpaceBeg = 0;
+  static const u64 kSpaceSize = SANITIZER_MMAP_RANGE_SIZE;
+  static const uptr kMetadataSize = sizeof(ChunkMetadata);
+  typedef __sanitizer::CompactSizeClassMap SizeClassMap;
+  static const uptr kRegionSizeLog = 20;
+  using AddressSpaceView = AddressSpaceViewTy;
+  typedef NoOpMapUnmapCallback MapUnmapCallback;
+  static const uptr kFlags = 0;
+};
+
+const uptr kAllocatorSpace = 0x600000000000ULL;
+const uptr kAllocatorSize = 0x40000000000ULL;  // 4T.
+
+template <typename AddressSpaceViewTy>
+struct AP64 {  // Allocator64 parameters. Deliberately using a short name.
+  static const uptr kSpaceBeg = kAllocatorSpace;
+  static const uptr kSpaceSize = kAllocatorSize;
+  static const uptr kMetadataSize = sizeof(ChunkMetadata);
+  typedef DefaultSizeClassMap SizeClassMap;
+  typedef NoOpMapUnmapCallback MapUnmapCallback;
+  static const uptr kFlags = 0;
+  using AddressSpaceView = AddressSpaceViewTy;
+};
+
+template <typename AddressSpaceView>
+using Allocator32ASVT = SizeClassAllocator32<AP32<AddressSpaceView>>;
+template <typename AddressSpaceView>
+using Allocator64ASVT = SizeClassAllocator64<AP64<AddressSpaceView>>;
+
+using Allocator32 = Allocator32ASVT<LocalAddressSpaceView>;
+using Allocator64 = Allocator64ASVT<LocalAddressSpaceView>;
+
+template <typename AddressSpaceView>
+using PrimaryAllocatorASVT =
+    RuntimeSelectAllocator<Allocator32ASVT<AddressSpaceView>,
+                           Allocator64ASVT<AddressSpaceView>>;
+#elif defined(__mips64) || defined(__i386__) || defined(__arm__)
 template <typename AddressSpaceViewTy>
 struct AP32 {
   static const uptr kSpaceBeg = 0;
@@ -64,7 +102,6 @@ struct AP32 {
 };
 template <typename AddressSpaceView>
 using PrimaryAllocatorASVT = SizeClassAllocator32<AP32<AddressSpaceView>>;
-using PrimaryAllocator = PrimaryAllocatorASVT<LocalAddressSpaceView>;
 #elif defined(__x86_64__) || defined(__powerpc64__)
 # if defined(__powerpc64__)
 const uptr kAllocatorSpace = 0xa0000000000ULL;
@@ -86,13 +123,13 @@ struct AP64 {  // Allocator64 parameters
 
 template <typename AddressSpaceView>
 using PrimaryAllocatorASVT = SizeClassAllocator64<AP64<AddressSpaceView>>;
-using PrimaryAllocator = PrimaryAllocatorASVT<LocalAddressSpaceView>;
 #endif
 
 template <typename AddressSpaceView>
 using AllocatorASVT = CombinedAllocator<PrimaryAllocatorASVT<AddressSpaceView>>;
 using Allocator = AllocatorASVT<LocalAddressSpaceView>;
 using AllocatorCache = Allocator::AllocatorCache;
+using PrimaryAllocator = PrimaryAllocatorASVT<LocalAddressSpaceView>;
 
 Allocator::AllocatorCache *GetAllocatorCache();
 

Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.h?rev=369441&r1=369440&r2=369441&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.h (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.h Tue Aug 20 13:54:05 2019
@@ -75,6 +75,7 @@ INLINE void RandomShuffle(T *a, u32 n, u
 #include "sanitizer_allocator_local_cache.h"
 #include "sanitizer_allocator_secondary.h"
 #include "sanitizer_allocator_combined.h"
+#include "sanitizer_runtime_select_allocator.h"
 
 } // namespace __sanitizer
 

Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_combined.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_combined.h?rev=369441&r1=369440&r2=369441&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_combined.h (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_combined.h Tue Aug 20 13:54:05 2019
@@ -41,6 +41,10 @@ class CombinedAllocator {
     secondary_.Init();
   }
 
+  bool CanAllocate(uptr size, uptr alignment) {
+    return primary_.CanAllocate(size, alignment);
+  }
+
   void *Allocate(AllocatorCache *cache, uptr size, uptr alignment) {
     // Returning 0 on malloc(0) may break a lot of code.
     if (size == 0)
@@ -194,6 +198,10 @@ class CombinedAllocator {
     secondary_.ForEachChunk(callback, arg);
   }
 
+  uptr KNumClasses() { return primary_.KNumClasses(); }
+  uptr KMaxSize() { return primary_.KMaxSize(); }
+  uptr ClassID(uptr size) { return primary_.ClassID(size); }
+
  private:
   PrimaryAllocator primary_;
   SecondaryAllocator secondary_;

Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary32.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary32.h?rev=369441&r1=369440&r2=369441&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary32.h (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary32.h Tue Aug 20 13:54:05 2019
@@ -271,6 +271,9 @@ class SizeClassAllocator32 {
   typedef SizeClassMap SizeClassMapT;
   static const uptr kNumClasses = SizeClassMap::kNumClasses;
 
+  static uptr KNumClasses() { return SizeClassMap::kNumClasses; }
+  static uptr KMaxSize() { return SizeClassMap::kMaxSize; }
+
  private:
   static const uptr kRegionSize = 1 << kRegionSizeLog;
   static const uptr kNumPossibleRegions = kSpaceSize / kRegionSize;

Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary64.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary64.h?rev=369441&r1=369440&r2=369441&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary64.h (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary64.h Tue Aug 20 13:54:05 2019
@@ -319,6 +319,9 @@ class SizeClassAllocator64 {
   static const uptr kNumClasses = SizeClassMap::kNumClasses;
   static const uptr kNumClassesRounded = SizeClassMap::kNumClassesRounded;
 
+  static uptr KNumClasses() { return SizeClassMap::kNumClasses; }
+  static uptr KMaxSize() { return SizeClassMap::kMaxSize; }
+
   // A packed array of counters. Each counter occupies 2^n bits, enough to store
   // counter's max_value. Ctor will try to allocate the required buffer via
   // mapper->MapPackedCounterArrayBuffer and the caller is expected to check

Added: compiler-rt/trunk/lib/sanitizer_common/sanitizer_runtime_select_allocator.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_runtime_select_allocator.h?rev=369441&view=auto
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_runtime_select_allocator.h (added)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_runtime_select_allocator.h Tue Aug 20 13:54:05 2019
@@ -0,0 +1,179 @@
+//===-- sanitizer_runtime_select_allocator.h --------------------*- C++ -*-===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// Select one of the two allocators at runtime.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef SANITIZER_RUNTIME_SELECT_ALLOCATOR_H
+#define SANITIZER_RUNTIME_SELECT_ALLOCATOR_H
+
+template <class Allocator1, class Allocator2>
+class RuntimeSelectAllocator {
+  Allocator1 a1;
+  Allocator2 a2;
+
+ public:
+  bool use_first_allocator;
+
+  class RuntimeSelectAllocatorCache {
+    typename Allocator1::AllocatorCache a1;
+    typename Allocator2::AllocatorCache a2;
+
+   public:
+    void Init(AllocatorGlobalStats *s) {
+      if (this->use_first_allocator)
+        a1.Init(s);
+      else
+        a2.Init(s);
+    }
+    void *Allocate(RuntimeSelectAllocator *allocator, uptr class_id) {
+      if (allocator->use_first_allocator)
+        return a1.Allocate(&allocator->a1, class_id);
+      return a2.Allocate(&allocator->a2, class_id);
+    }
+
+    void Deallocate(RuntimeSelectAllocator *allocator, uptr class_id, void *p) {
+      if (allocator->use_first_allocator)
+        a1.Deallocate(&allocator->a1, class_id, p);
+      else
+        a2.Deallocate(&allocator->a2, class_id, p);
+    }
+
+    void Drain(RuntimeSelectAllocator *allocator) {
+      if (allocator->use_first_allocator)
+        a1.Drain(&allocator->a1);
+      else
+        a2.Drain(&allocator->a2);
+    }
+
+    void Destroy(RuntimeSelectAllocator *allocator, AllocatorGlobalStats *s) {
+      if (allocator->use_first_allocator)
+        a1.Destroy(&allocator->a1, s);
+      else
+        a2.Destroy(&allocator->a2, s);
+    }
+  };
+
+  using MapUnmapCallback = typename Allocator1::MapUnmapCallback;
+  using AddressSpaceView = typename Allocator1::AddressSpaceView;
+  using AllocatorCache = RuntimeSelectAllocatorCache;
+
+  void Init(s32 release_to_os_interval_ms) {
+    // Use the first allocator when the address
+    // space is too small for the 64-bit allocator.
+    use_first_allocator = GetMaxVirtualAddress() < (((uptr)1ULL << 48) - 1);
+    if (use_first_allocator)
+      a1.Init(release_to_os_interval_ms);
+    else
+      a2.Init(release_to_os_interval_ms);
+  }
+
+  bool CanAllocate(uptr size, uptr alignment) {
+    if (use_first_allocator)
+      return Allocator1::CanAllocate(size, alignment);
+    return Allocator2::CanAllocate(size, alignment);
+  }
+
+  uptr ClassID(uptr size) {
+    if (use_first_allocator)
+      return Allocator1::ClassID(size);
+    return Allocator2::ClassID(size);
+  }
+
+  uptr KNumClasses() {
+    if (use_first_allocator)
+      return Allocator1::KNumClasses();
+    return Allocator2::KNumClasses();
+  }
+
+  uptr KMaxSize() {
+    if (use_first_allocator)
+      return Allocator1::KMaxSize();
+    return Allocator2::KMaxSize();
+  }
+
+  bool PointerIsMine(const void *p) {
+    if (use_first_allocator)
+      return a1.PointerIsMine(p);
+    return a2.PointerIsMine(p);
+  }
+
+  void *GetMetaData(const void *p) {
+    if (use_first_allocator)
+      return a1.GetMetaData(p);
+    return a2.GetMetaData(p);
+  }
+
+  uptr GetSizeClass(const void *p) {
+    if (use_first_allocator)
+      return a1.GetSizeClass(p);
+    return a2.GetSizeClass(p);
+  }
+
+  void ForEachChunk(ForEachChunkCallback callback, void *arg) {
+    if (use_first_allocator)
+      a1.ForEachChunk(callback, arg);
+    else
+      a2.ForEachChunk(callback, arg);
+  }
+
+  void TestOnlyUnmap() {
+    if (use_first_allocator)
+      a1.TestOnlyUnmap();
+    else
+      a2.TestOnlyUnmap();
+  }
+  void ForceLock() {
+    if (use_first_allocator)
+      a1.ForceLock();
+    else
+      a2.ForceLock();
+  }
+  void ForceUnlock() {
+    if (use_first_allocator)
+      a1.ForceUnlock();
+    else
+      a2.ForceUnlock();
+  }
+  void *GetBlockBegin(const void *p) {
+    if (use_first_allocator)
+      return a1.GetBlockBegin(p);
+    return a2.GetBlockBegin(p);
+  }
+  uptr GetActuallyAllocatedSize(void *p) {
+    if (use_first_allocator)
+      return a1.GetActuallyAllocatedSize(p);
+    return a2.GetActuallyAllocatedSize(p);
+  }
+  void SetReleaseToOSIntervalMs(s32 release_to_os_interval_ms) {
+    if (use_first_allocator)
+      a1.SetReleaseToOSIntervalMs(release_to_os_interval_ms);
+    else
+      a2.SetReleaseToOSIntervalMs(release_to_os_interval_ms);
+  }
+  s32 ReleaseToOSIntervalMs() const {
+    if (use_first_allocator)
+      return a1.ReleaseToOSIntervalMs();
+    return a2.ReleaseToOSIntervalMs();
+  }
+  void ForceReleaseToOS() {
+    if (use_first_allocator)
+      a1.ForceReleaseToOS();
+    else
+      a2.ForceReleaseToOS();
+  }
+  void PrintStats() {
+    if (use_first_allocator)
+      a1.PrintStats();
+    else
+      a2.PrintStats();
+  }
+};
+
+#endif // SANITIZER_RUNTIME_SELECT_ALLOCATOR_H

Modified: compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp?rev=369441&r1=369440&r2=369441&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp (original)
+++ compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_allocator_test.cpp Tue Aug 20 13:54:05 2019
@@ -160,6 +160,9 @@ using Allocator32CompactASVT =
     SizeClassAllocator32<AP32Compact<AddressSpaceView>>;
 using Allocator32Compact = Allocator32CompactASVT<LocalAddressSpaceView>;
 
+using Allocator32or64Compact =
+    RuntimeSelectAllocator<Allocator32Compact, Allocator64Compact>;
+
 template <class SizeClassMap>
 void TestSizeClassMap() {
   typedef SizeClassMap SCMap;
@@ -274,6 +277,13 @@ TEST(SanitizerCommon, SizeClassAllocator
   TestSizeClassAllocator<Allocator64Compact>();
 }
 
+TEST(SanitizerCommon, SizeClassAllocator32or64Compact) {
+  Allocator32or64Compact::UseAllocator1 = false;
+  TestSizeClassAllocator<Allocator32or64Compact>();
+  Allocator32or64Compact::UseAllocator1 = true;
+  TestSizeClassAllocator<Allocator32or64Compact>();
+}
+
 TEST(SanitizerCommon, SizeClassAllocator64Dense) {
   TestSizeClassAllocator<Allocator64Dense>();
 }
@@ -357,6 +367,12 @@ TEST(SanitizerCommon, SizeClassAllocator
 TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
   SizeClassAllocatorMetadataStress<Allocator64Compact>();
 }
+TEST(SanitizerCommon, SizeClassAllocator32or64CompactMetadataStress) {
+  Allocator32or64Compact::UseAllocator1 = false;
+  SizeClassAllocatorMetadataStress<Allocator32or64Compact>();
+  Allocator32or64Compact::UseAllocator1 = true;
+  SizeClassAllocatorMetadataStress<Allocator32or64Compact>();
+}
 #endif
 
 #endif
@@ -404,6 +420,12 @@ TEST(SanitizerCommon, SizeClassAllocator
 TEST(SanitizerCommon, SizeClassAllocator64CompactGetBlockBegin) {
   SizeClassAllocatorGetBlockBeginStress<Allocator64Compact>(1ULL << 33);
 }
+TEST(SanitizerCommon, SizeClassAllocator32or64CompactGetBlockBegin) {
+  Allocator32or64Compact::UseAllocator1 = false;
+  SizeClassAllocatorGetBlockBeginStress<Allocator32or64Compact>(1ULL << 33);
+  Allocator32or64Compact::UseAllocator1 = true;
+  SizeClassAllocatorGetBlockBeginStress<Allocator32or64Compact>(1ULL << 33);
+}
 #endif
 TEST(SanitizerCommon, SizeClassAllocator64VeryCompactGetBlockBegin) {
   // Does not have > 4Gb for each class.
@@ -694,6 +716,12 @@ TEST(SanitizerCommon, CombinedAllocator6
 TEST(SanitizerCommon, CombinedAllocator64Compact) {
   TestCombinedAllocator<Allocator64Compact>();
 }
+TEST(SanitizerCommon, CombinedRuntimeSelectAllocator) {
+  Allocator32or64Compact::UseAllocator1 = false;
+  TestCombinedAllocator<Allocator32or64Compact>();
+  Allocator32or64Compact::UseAllocator1 = true;
+  TestCombinedAllocator<Allocator32or64Compact>();
+}
 #endif
 
 TEST(SanitizerCommon, CombinedAllocator64VeryCompact) {
@@ -755,6 +783,12 @@ TEST(SanitizerCommon, SizeClassAllocator
 TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
   TestSizeClassAllocatorLocalCache<Allocator64Compact>();
 }
+TEST(SanitizerCommon, SizeClassAllocator32or64CompactLocalCache) {
+  Allocator32or64Compact::UseAllocator1 = false;
+  TestSizeClassAllocatorLocalCache<Allocator32or64Compact>();
+  Allocator32or64Compact::UseAllocator1 = true;
+  TestSizeClassAllocatorLocalCache<Allocator32or64Compact>();
+}
 #endif
 TEST(SanitizerCommon, SizeClassAllocator64VeryCompactLocalCache) {
   TestSizeClassAllocatorLocalCache<Allocator64VeryCompact>();
@@ -1333,6 +1367,12 @@ TEST(SanitizerCommon, SizeClassAllocator
 TEST(SanitizerCommon, SizeClassAllocator64CompactReleaseFreeMemoryToOS) {
   TestReleaseFreeMemoryToOS<Allocator64Compact>();
 }
+TEST(SanitizerCommon, SizeClassAllocator32or64CompactReleaseFreeMemoryToOS) {
+  Allocator32or64Compact::UseAllocator1 = false;
+  TestReleaseFreeMemoryToOS<Allocator32or64Compact>();
+  Allocator32or64Compact::UseAllocator1 = true;
+  TestReleaseFreeMemoryToOS<Allocator32or64Compact>();
+}
 
 TEST(SanitizerCommon, SizeClassAllocator64VeryCompactReleaseFreeMemoryToOS) {
   TestReleaseFreeMemoryToOS<Allocator64VeryCompact>();




More information about the llvm-commits mailing list