[compiler-rt] r279495 - [sanitizer] allocator: split the local cache class into two, one for the 32-bit allocator and one for the 64-bit one. NFC. The two implementations will diverge in the following changes.
Kostya Serebryany via llvm-commits
llvm-commits at lists.llvm.org
Mon Aug 22 17:30:43 PDT 2016
Author: kcc
Date: Mon Aug 22 19:30:43 2016
New Revision: 279495
URL: http://llvm.org/viewvc/llvm-project?rev=279495&view=rev
Log:
[sanitizer] allocator: split the local cache class into two, one for the 32-bit allocator and one for the 64-bit one. NFC. The two implementations will diverge in the following changes.
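(Editorial note, not part of the commit.) Callers never name the cache class directly; they go through the allocator's AllocatorCache typedef, which after this change resolves to SizeClassAllocator64LocalCache or SizeClassAllocator32LocalCache as appropriate, so the split is transparent to them. A minimal sketch of that usage follows; the template arguments, the address-space values, and the AllocFromClass helper are illustrative assumptions, not taken from any particular tool:

    // Hypothetical primary allocator instantiation; the address-space values
    // below are made up for illustration.
    typedef SizeClassAllocator64<0x700000000000ULL /*kSpaceBeg*/,
                                 0x010000000000ULL /*kSpaceSize*/,
                                 0 /*kMetadataSize*/,
                                 DefaultSizeClassMap,
                                 NoOpMapUnmapCallback> PrimaryAllocator;

    // After this commit the typedef below resolves to
    // SizeClassAllocator64LocalCache<PrimaryAllocator>; before, it was
    // SizeClassAllocatorLocalCache<PrimaryAllocator>. Call sites are unchanged.
    typedef PrimaryAllocator::AllocatorCache AllocatorCache;

    static THREADLOCAL AllocatorCache cache;  // one POD cache object per thread

    void *AllocFromClass(PrimaryAllocator *a, uptr class_id) {
      return cache.Allocate(a, class_id);
    }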
Modified:
compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_local_cache.h
compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary32.h
compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary64.h
compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_size_class_map.h
Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_local_cache.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_local_cache.h?rev=279495&r1=279494&r2=279495&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_local_cache.h (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_local_cache.h Mon Aug 22 19:30:43 2016
@@ -18,7 +18,155 @@
// or SizeClassAllocator32. Since the typical use of this class is to have one
// object per thread in TLS, it has to be POD.
template<class SizeClassAllocator>
-struct SizeClassAllocatorLocalCache {
+struct SizeClassAllocatorLocalCache
+ : SizeClassAllocator::AllocatorCache {
+};
+
+// Cache used by SizeClassAllocator64.
+template <class SizeClassAllocator>
+struct SizeClassAllocator64LocalCache {
+ typedef SizeClassAllocator Allocator;
+ typedef typename Allocator::TransferBatch TransferBatch;
+ static const uptr kNumClasses = SizeClassAllocator::kNumClasses;
+
+ void Init(AllocatorGlobalStats *s) {
+ stats_.Init();
+ if (s)
+ s->Register(&stats_);
+ }
+
+ void Destroy(SizeClassAllocator *allocator, AllocatorGlobalStats *s) {
+ Drain(allocator);
+ if (s)
+ s->Unregister(&stats_);
+ }
+
+ void *Allocate(SizeClassAllocator *allocator, uptr class_id) {
+ CHECK_NE(class_id, 0UL);
+ CHECK_LT(class_id, kNumClasses);
+ stats_.Add(AllocatorStatAllocated, Allocator::ClassIdToSize(class_id));
+ PerClass *c = &per_class_[class_id];
+ if (UNLIKELY(c->count == 0))
+ Refill(allocator, class_id);
+ void *res = c->batch[--c->count];
+ PREFETCH(c->batch[c->count - 1]);
+ return res;
+ }
+
+ void Deallocate(SizeClassAllocator *allocator, uptr class_id, void *p) {
+ CHECK_NE(class_id, 0UL);
+ CHECK_LT(class_id, kNumClasses);
+ // If the first allocator call on a new thread is a deallocation, then
+ // max_count will be zero, leading to check failure.
+ InitCache();
+ stats_.Sub(AllocatorStatAllocated, Allocator::ClassIdToSize(class_id));
+ PerClass *c = &per_class_[class_id];
+ CHECK_NE(c->max_count, 0UL);
+ if (UNLIKELY(c->count == c->max_count))
+ Drain(allocator, class_id);
+ c->batch[c->count++] = p;
+ }
+
+ void Drain(SizeClassAllocator *allocator) {
+ for (uptr class_id = 0; class_id < kNumClasses; class_id++) {
+ PerClass *c = &per_class_[class_id];
+ while (c->count > 0)
+ Drain(allocator, class_id);
+ }
+ }
+
+ // private:
+ typedef typename SizeClassAllocator::SizeClassMapT SizeClassMap;
+ struct PerClass {
+ uptr count;
+ uptr max_count;
+ void *batch[2 * TransferBatch::kMaxNumCached];
+ };
+ PerClass per_class_[kNumClasses];
+ AllocatorStats stats_;
+
+ void InitCache() {
+ if (per_class_[1].max_count)
+ return;
+ for (uptr i = 0; i < kNumClasses; i++) {
+ PerClass *c = &per_class_[i];
+ c->max_count = 2 * TransferBatch::MaxCached(i);
+ }
+ }
+
+ // TransferBatch class is declared in SizeClassAllocator.
+ // We transfer chunks between central and thread-local free lists in batches.
+ // For small size classes we allocate batches separately.
+ // For large size classes we may use one of the chunks to store the batch.
+ // sizeof(TransferBatch) must be a power of 2 for more efficient allocation.
+
+ // If kUseSeparateSizeClassForBatch is true,
+ // all TransferBatch objects are allocated from kBatchClassID
+ // size class (except for those that are needed for kBatchClassID itself).
+ // The goal is to have TransferBatches in a totally different region of RAM
+ // to improve security and allow more efficient RAM reclamation.
+ // This is experimental and may currently increase memory usage by up to 3%
+ // in extreme cases.
+ static const bool kUseSeparateSizeClassForBatch = false;
+
+ static uptr SizeClassForTransferBatch(uptr class_id) {
+ if (kUseSeparateSizeClassForBatch)
+ return class_id == SizeClassMap::kBatchClassID
+ ? 0
+ : SizeClassMap::kBatchClassID;
+ if (Allocator::ClassIdToSize(class_id) <
+ TransferBatch::AllocationSizeRequiredForNElements(
+ TransferBatch::MaxCached(class_id)))
+ return SizeClassMap::ClassID(sizeof(TransferBatch));
+ return 0;
+ }
+
+ // Returns a TransferBatch suitable for class_id.
+ // For small size classes allocates the batch from the allocator.
+ // For large size classes simply returns b.
+ TransferBatch *CreateBatch(uptr class_id, SizeClassAllocator *allocator,
+ TransferBatch *b) {
+ if (uptr batch_class_id = SizeClassForTransferBatch(class_id))
+ return (TransferBatch*)Allocate(allocator, batch_class_id);
+ return b;
+ }
+
+ // Destroys TransferBatch b.
+ // For small size classes deallocates b to the allocator.
+ // Does nothing for large size classes.
+ void DestroyBatch(uptr class_id, SizeClassAllocator *allocator,
+ TransferBatch *b) {
+ if (uptr batch_class_id = SizeClassForTransferBatch(class_id))
+ Deallocate(allocator, batch_class_id, b);
+ }
+
+ NOINLINE void Refill(SizeClassAllocator *allocator, uptr class_id) {
+ InitCache();
+ PerClass *c = &per_class_[class_id];
+ TransferBatch *b = allocator->AllocateBatch(&stats_, this, class_id);
+ CHECK_GT(b->Count(), 0);
+ b->CopyToArray(c->batch);
+ c->count = b->Count();
+ DestroyBatch(class_id, allocator, b);
+ }
+
+ NOINLINE void Drain(SizeClassAllocator *allocator, uptr class_id) {
+ InitCache();
+ PerClass *c = &per_class_[class_id];
+ uptr cnt = Min(c->max_count / 2, c->count);
+ uptr first_idx_to_drain = c->count - cnt;
+ TransferBatch *b = CreateBatch(
+ class_id, allocator, (TransferBatch *)c->batch[first_idx_to_drain]);
+ b->SetFromArray(allocator->GetRegionBeginBySizeClass(class_id),
+ &c->batch[first_idx_to_drain], cnt);
+ c->count -= cnt;
+ allocator->DeallocateBatch(&stats_, class_id, b);
+ }
+};
+
+// Cache used by SizeClassAllocator32.
+template <class SizeClassAllocator>
+struct SizeClassAllocator32LocalCache {
typedef SizeClassAllocator Allocator;
typedef typename Allocator::TransferBatch TransferBatch;
static const uptr kNumClasses = SizeClassAllocator::kNumClasses;
@@ -157,3 +305,4 @@ struct SizeClassAllocatorLocalCache {
allocator->DeallocateBatch(&stats_, class_id, b);
}
};
+
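(Editorial note, not part of the commit.) The TransferBatch placement logic above (SizeClassForTransferBatch) chooses between storing the batch inside one of the transferred chunks and allocating it from its own size class, depending on whether a chunk of the given class is large enough to hold the batch header plus MaxCached(class_id) pointers. A simplified, self-contained model of that decision, with made-up header size, chunk sizes, and class id:

    #include <cstddef>
    #include <cstdio>

    // Assumed batch header (a count plus a region pointer); illustrative only.
    static const size_t kBatchHeader = 2 * sizeof(void *);

    static size_t BatchSizeForN(size_t n) {
      return kBatchHeader + n * sizeof(void *);
    }

    // Mirrors the shape of SizeClassForTransferBatch: returns 0 when the batch
    // can live inside one of the transferred chunks, otherwise the class id
    // from which a separate batch object must be allocated.
    static size_t SizeClassForBatch(size_t chunk_size, size_t max_cached,
                                    size_t batch_class_id) {
      if (chunk_size < BatchSizeForN(max_cached))
        return batch_class_id;  // chunk too small: allocate the batch separately
      return 0;                 // chunk big enough: reuse a chunk as the batch
    }

    int main() {
      // 32-byte chunks caching 64 pointers cannot hold their own batch...
      std::printf("%zu\n", SizeClassForBatch(32, 64, 25));   // prints 25
      // ...but 1024-byte chunks caching 4 pointers can.
      std::printf("%zu\n", SizeClassForBatch(1024, 4, 25));  // prints 0
      return 0;
    }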
Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary32.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary32.h?rev=279495&r1=279494&r2=279495&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary32.h (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary32.h Mon Aug 22 19:30:43 2016
@@ -14,6 +14,8 @@
#error This file must be included inside sanitizer_allocator.h
#endif
+template<class SizeClassAllocator> struct SizeClassAllocator32LocalCache;
+
// SizeClassAllocator32 -- allocator for 32-bit address space.
// This allocator can theoretically be used on 64-bit arch, but there it is less
// efficient than SizeClassAllocator64.
@@ -88,7 +90,7 @@ class SizeClassAllocator32 {
typedef SizeClassAllocator32<kSpaceBeg, kSpaceSize, kMetadataSize,
SizeClassMap, kRegionSizeLog, ByteMap, MapUnmapCallback> ThisT;
- typedef SizeClassAllocatorLocalCache<ThisT> AllocatorCache;
+ typedef SizeClassAllocator32LocalCache<ThisT> AllocatorCache;
void Init() {
possible_regions.TestOnlyInit();
Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary64.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary64.h?rev=279495&r1=279494&r2=279495&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary64.h (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_primary64.h Mon Aug 22 19:30:43 2016
@@ -14,6 +14,8 @@
#error This file must be included inside sanitizer_allocator.h
#endif
+template<class SizeClassAllocator> struct SizeClassAllocator64LocalCache;
+
// SizeClassAllocator64 -- allocator for 64-bit address space.
//
// Space: a portion of address space of kSpaceSize bytes starting at SpaceBeg.
@@ -95,7 +97,7 @@ class SizeClassAllocator64 {
typedef SizeClassAllocator64<kSpaceBeg, kSpaceSize, kMetadataSize,
SizeClassMap, MapUnmapCallback> ThisT;
- typedef SizeClassAllocatorLocalCache<ThisT> AllocatorCache;
+ typedef SizeClassAllocator64LocalCache<ThisT> AllocatorCache;
void Init() {
uptr TotalSpaceSize = kSpaceSize + AdditionalSize();
Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_size_class_map.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_size_class_map.h?rev=279495&r1=279494&r2=279495&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_size_class_map.h (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_size_class_map.h Mon Aug 22 19:30:43 2016
@@ -188,4 +188,3 @@ class SizeClassMap {
typedef SizeClassMap<17, 128, 16> DefaultSizeClassMap;
typedef SizeClassMap<17, 64, 14> CompactSizeClassMap;
-template<class SizeClassAllocator> struct SizeClassAllocatorLocalCache;