[compiler-rt] r305590 - [Sanitizer] Remove CombinedAllocator::Allocate's 'cleared' parameter
Alex Shlyapnikov via llvm-commits
llvm-commits at lists.llvm.org
Fri Jun 16 14:00:03 PDT 2017
Author: alekseyshl
Date: Fri Jun 16 16:00:03 2017
New Revision: 305590
URL: http://llvm.org/viewvc/llvm-project?rev=305590&view=rev
Log:
[Sanitizer] Remove CombinedAllocator::Allocate's 'cleared' parameter
Summary:
The 'cleared' parameter of CombinedAllocator::Allocate is not used anywhere and
seems to be obsolete.
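With the parameter gone, callers that need zeroed memory clear it themselves,
as the LSan allocator already does in the hunk below. A minimal sketch of that
caller-side pattern (the 'allocator', 'cache', and 'need_zeroed_memory' names
here are illustrative, not the exact sanitizer code):

    // After this change, Allocate takes only (cache, size, alignment).
    void *p = allocator.Allocate(cache, size, alignment);
    // The caller zeroes the chunk only when it actually needs cleared memory.
    if (p && need_zeroed_memory)
      memset(p, 0, size);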
Reviewers: eugenis
Subscribers: llvm-commits, kubamracek
Differential Revision: https://reviews.llvm.org/D34289
Modified:
compiler-rt/trunk/lib/asan/asan_allocator.cc
compiler-rt/trunk/lib/lsan/lsan_allocator.cc
compiler-rt/trunk/lib/msan/msan_allocator.cc
compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.cc
compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_combined.h
Modified: compiler-rt/trunk/lib/asan/asan_allocator.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_allocator.cc?rev=305590&r1=305589&r2=305590&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_allocator.cc (original)
+++ compiler-rt/trunk/lib/asan/asan_allocator.cc Fri Jun 16 16:00:03 2017
@@ -160,7 +160,7 @@ struct QuarantineCallback {
}
void *Allocate(uptr size) {
- return get_allocator().Allocate(cache_, size, 1, false);
+ return get_allocator().Allocate(cache_, size, 1);
}
void Deallocate(void *p) {
@@ -414,13 +414,11 @@ struct Allocator {
void *allocated;
if (t) {
AllocatorCache *cache = GetAllocatorCache(&t->malloc_storage());
- allocated =
- allocator.Allocate(cache, needed_size, 8, false);
+ allocated = allocator.Allocate(cache, needed_size, 8);
} else {
SpinMutexLock l(&fallback_mutex);
AllocatorCache *cache = &fallback_allocator_cache;
- allocated =
- allocator.Allocate(cache, needed_size, 8, false);
+ allocated = allocator.Allocate(cache, needed_size, 8);
}
if (!allocated) return allocator.ReturnNullOrDieOnOOM();
Modified: compiler-rt/trunk/lib/lsan/lsan_allocator.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/lsan/lsan_allocator.cc?rev=305590&r1=305589&r2=305590&view=diff
==============================================================================
--- compiler-rt/trunk/lib/lsan/lsan_allocator.cc (original)
+++ compiler-rt/trunk/lib/lsan/lsan_allocator.cc Fri Jun 16 16:00:03 2017
@@ -76,7 +76,7 @@ void *Allocate(const StackTrace &stack,
Report("WARNING: LeakSanitizer failed to allocate %zu bytes\n", size);
return nullptr;
}
- void *p = allocator.Allocate(GetAllocatorCache(), size, alignment, false);
+ void *p = allocator.Allocate(GetAllocatorCache(), size, alignment);
// Do not rely on the allocator to clear the memory (it's slow).
if (cleared && allocator.FromPrimary(p))
memset(p, 0, size);
Modified: compiler-rt/trunk/lib/msan/msan_allocator.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/msan/msan_allocator.cc?rev=305590&r1=305589&r2=305590&view=diff
==============================================================================
--- compiler-rt/trunk/lib/msan/msan_allocator.cc (original)
+++ compiler-rt/trunk/lib/msan/msan_allocator.cc Fri Jun 16 16:00:03 2017
@@ -145,11 +145,11 @@ static void *MsanAllocate(StackTrace *st
void *allocated;
if (t) {
AllocatorCache *cache = GetAllocatorCache(&t->malloc_storage());
- allocated = allocator.Allocate(cache, size, alignment, false);
+ allocated = allocator.Allocate(cache, size, alignment);
} else {
SpinMutexLock l(&fallback_mutex);
AllocatorCache *cache = &fallback_allocator_cache;
- allocated = allocator.Allocate(cache, size, alignment, false);
+ allocated = allocator.Allocate(cache, size, alignment);
}
Metadata *meta =
reinterpret_cast<Metadata *>(allocator.GetMetaData(allocated));
Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.cc?rev=305590&r1=305589&r2=305590&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.cc (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.cc Fri Jun 16 16:00:03 2017
@@ -108,9 +108,9 @@ static void *RawInternalAlloc(uptr size,
if (cache == 0) {
SpinMutexLock l(&internal_allocator_cache_mu);
return internal_allocator()->Allocate(&internal_allocator_cache, size,
- alignment, false);
+ alignment);
}
- return internal_allocator()->Allocate(cache, size, alignment, false);
+ return internal_allocator()->Allocate(cache, size, alignment);
}
static void *RawInternalRealloc(void *ptr, uptr size,
Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_combined.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_combined.h?rev=305590&r1=305589&r2=305590&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_combined.h (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator_combined.h Fri Jun 16 16:00:03 2017
@@ -42,8 +42,7 @@ class CombinedAllocator {
InitCommon(may_return_null, release_to_os_interval_ms);
}
- void *Allocate(AllocatorCache *cache, uptr size, uptr alignment,
- bool cleared = false) {
+ void *Allocate(AllocatorCache *cache, uptr size, uptr alignment) {
// Returning 0 on malloc(0) may break a lot of code.
if (size == 0)
size = 1;
@@ -70,11 +69,6 @@ class CombinedAllocator {
res = secondary_.Allocate(&stats_, original_size, alignment);
if (alignment > 8)
CHECK_EQ(reinterpret_cast<uptr>(res) & (alignment - 1), 0);
- // When serviced by the secondary, the chunk comes from a mmap allocation
- // and will be zero'd out anyway. We only need to clear our the chunk if
- // it was serviced by the primary, hence using the rounded up 'size'.
- if (cleared && res && from_primary)
- internal_bzero_aligned16(res, RoundUpTo(size, 16));
return res;
}