[compiler-rt] r224148 - [Sanitizer] Introduce Allocator::may_return_null bool flag.

Alexey Samsonov <vonosmas at gmail.com>
Fri Dec 12 12:07:35 PST 2014


Author: samsonov
Date: Fri Dec 12 14:07:35 2014
New Revision: 224148

URL: http://llvm.org/viewvc/llvm-project?rev=224148&view=rev
Log:
[Sanitizer] Introduce Allocator::may_return_null bool flag.

Summary:
Turn "allocator_may_return_null" common flag into an
Allocator::may_return_null bool flag. We want to make sure
that common flags are immutable after initialization. There
are cases when we want to change this flag in the allocator
at runtime: e.g. in unit tests and during ASan activation
on Android.

Test Plan: regression test suite, real-life applications

Reviewers: kcc, eugenis

Subscribers: llvm-commits

Differential Revision: http://reviews.llvm.org/D6623
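
The shape of the new interface, as introduced by the diff below, is: each
allocator carries its own may_return_null bit (an atomic_uint8_t) set once in
Init(), changeable later via SetMayReturnNull(); failure paths call
ReturnNullOrDie(), which either returns 0 or invokes the NORETURN
ReportAllocatorCannotReturnNull(). The following standalone C++ sketch mirrors
that pattern outside the sanitizer codebase; "ToyAllocator" and its method
bodies are illustrative stand-ins, not the actual compiler-rt classes
(CombinedAllocator / LargeMmapAllocator in sanitizer_allocator.h).

// Minimal standalone sketch of the pattern introduced in this revision.
#include <atomic>
#include <cstddef>
#include <cstdio>
#include <cstdlib>

[[noreturn]] static void ReportAllocatorCannotReturnNull() {
  std::fprintf(stderr,
               "allocator is terminating the process instead of returning 0\n"
               "If you don't like this behavior set allocator_may_return_null=1\n");
  std::abort();
}

class ToyAllocator {
 public:
  void Init(bool may_return_null) {
    may_return_null_.store(may_return_null, std::memory_order_relaxed);
  }
  // Runtime toggle, e.g. for unit tests or ASan activation on Android.
  void SetMayReturnNull(bool may_return_null) {
    may_return_null_.store(may_return_null, std::memory_order_release);
  }
  // Shared failure path: return 0 or report and die.
  void *ReturnNullOrDie() {
    if (may_return_null_.load(std::memory_order_acquire))
      return nullptr;
    ReportAllocatorCannotReturnNull();
  }
  void *Allocate(std::size_t size, std::size_t alignment) {
    if (size + alignment < size)  // overflow check, as in CombinedAllocator
      return ReturnNullOrDie();
    return std::malloc(size ? size : 1);
  }

 private:
  std::atomic<unsigned char> may_return_null_{0};
};

int main() {
  ToyAllocator a;
  a.Init(/*may_return_null=*/true);
  void *p = a.Allocate(static_cast<std::size_t>(-1), 1);  // overflow -> nullptr
  std::printf("huge allocation -> %p\n", p);
  a.SetMayReturnNull(false);  // from now on, failure terminates the process
  return 0;
}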

Modified:
    compiler-rt/trunk/lib/asan/asan_activation.cc
    compiler-rt/trunk/lib/asan/asan_allocator.h
    compiler-rt/trunk/lib/asan/asan_allocator2.cc
    compiler-rt/trunk/lib/asan/asan_rtl.cc
    compiler-rt/trunk/lib/lsan/lsan_allocator.cc
    compiler-rt/trunk/lib/msan/msan.h
    compiler-rt/trunk/lib/msan/msan_allocator.cc
    compiler-rt/trunk/lib/msan/msan_interceptors.cc
    compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.cc
    compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.h
    compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_allocator_test.cc
    compiler-rt/trunk/lib/tsan/rtl/tsan_interceptors.cc
    compiler-rt/trunk/lib/tsan/rtl/tsan_mman.cc
    compiler-rt/trunk/lib/tsan/rtl/tsan_mman.h

Modified: compiler-rt/trunk/lib/asan/asan_activation.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_activation.cc?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_activation.cc (original)
+++ compiler-rt/trunk/lib/asan/asan_activation.cc Fri Dec 12 14:07:35 2014
@@ -60,29 +60,26 @@ void AsanActivate() {
 
   // Restore flag values.
   // FIXME: this is not atomic, and there may be other threads alive.
-  flags()->quarantine_size = asan_deactivated_flags.quarantine_size;
   flags()->max_redzone = asan_deactivated_flags.max_redzone;
   flags()->poison_heap = asan_deactivated_flags.poison_heap;
   common_flags()->malloc_context_size =
       asan_deactivated_flags.malloc_context_size;
   flags()->alloc_dealloc_mismatch =
       asan_deactivated_flags.alloc_dealloc_mismatch;
-  common_flags()->allocator_may_return_null =
-      asan_deactivated_flags.allocator_may_return_null;
 
   ParseExtraActivationFlags();
 
-  ReInitializeAllocator();
+  ReInitializeAllocator(asan_deactivated_flags.allocator_may_return_null,
+                        asan_deactivated_flags.quarantine_size);
 
   asan_is_deactivated = false;
-  VReport(
-      1,
-      "quarantine_size %d, max_redzone %d, poison_heap %d, "
-      "malloc_context_size %d, alloc_dealloc_mismatch %d, "
-      "allocator_may_return_null %d\n",
-      flags()->quarantine_size, flags()->max_redzone, flags()->poison_heap,
-      common_flags()->malloc_context_size, flags()->alloc_dealloc_mismatch,
-      common_flags()->allocator_may_return_null);
+  VReport(1, "quarantine_size %d, max_redzone %d, poison_heap %d, "
+             "malloc_context_size %d, alloc_dealloc_mismatch %d, "
+             "allocator_may_return_null %d\n",
+          asan_deactivated_flags.quarantine_size, flags()->max_redzone,
+          flags()->poison_heap, common_flags()->malloc_context_size,
+          flags()->alloc_dealloc_mismatch,
+          asan_deactivated_flags.allocator_may_return_null);
 }
 
 }  // namespace __asan

Modified: compiler-rt/trunk/lib/asan/asan_allocator.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_allocator.h?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_allocator.h (original)
+++ compiler-rt/trunk/lib/asan/asan_allocator.h Fri Dec 12 14:07:35 2014
@@ -31,8 +31,8 @@ enum AllocType {
 static const uptr kNumberOfSizeClasses = 255;
 struct AsanChunk;
 
-void InitializeAllocator();
-void ReInitializeAllocator();
+void InitializeAllocator(bool may_return_null, uptr quarantine_size);
+void ReInitializeAllocator(bool may_return_null, uptr quarantine_size);
 
 class AsanChunkView {
  public:

Modified: compiler-rt/trunk/lib/asan/asan_allocator2.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_allocator2.cc?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_allocator2.cc (original)
+++ compiler-rt/trunk/lib/asan/asan_allocator2.cc Fri Dec 12 14:07:35 2014
@@ -253,13 +253,14 @@ struct QuarantineCallback {
   AllocatorCache *cache_;
 };
 
-void InitializeAllocator() {
-  allocator.Init();
-  quarantine.Init((uptr)flags()->quarantine_size, kMaxThreadLocalQuarantine);
+void InitializeAllocator(bool may_return_null, uptr quarantine_size) {
+  allocator.Init(may_return_null);
+  quarantine.Init(quarantine_size, kMaxThreadLocalQuarantine);
 }
 
-void ReInitializeAllocator() {
-  quarantine.Init((uptr)flags()->quarantine_size, kMaxThreadLocalQuarantine);
+void ReInitializeAllocator(bool may_return_null, uptr quarantine_size) {
+  allocator.SetMayReturnNull(may_return_null);
+  quarantine.Init(quarantine_size, kMaxThreadLocalQuarantine);
 }
 
 static void *Allocate(uptr size, uptr alignment, BufferedStackTrace *stack,
@@ -297,7 +298,7 @@ static void *Allocate(uptr size, uptr al
   if (size > kMaxAllowedMallocSize || needed_size > kMaxAllowedMallocSize) {
     Report("WARNING: AddressSanitizer failed to allocate %p bytes\n",
            (void*)size);
-    return AllocatorReturnNull();
+    return allocator.ReturnNullOrDie();
   }
 
   AsanThread *t = GetCurrentThread();
@@ -598,7 +599,7 @@ void *asan_malloc(uptr size, BufferedSta
 
 void *asan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) {
   if (CallocShouldReturnNullDueToOverflow(size, nmemb))
-    return AllocatorReturnNull();
+    return allocator.ReturnNullOrDie();
   void *ptr = Allocate(nmemb * size, 8, stack, FROM_MALLOC, false);
   // If the memory comes from the secondary allocator no need to clear it
   // as it comes directly from mmap.

Modified: compiler-rt/trunk/lib/asan/asan_rtl.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_rtl.cc?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_rtl.cc (original)
+++ compiler-rt/trunk/lib/asan/asan_rtl.cc Fri Dec 12 14:07:35 2014
@@ -658,7 +658,8 @@ static void AsanInitInternal() {
   AsanTSDInit(PlatformTSDDtor);
   InstallDeadlySignalHandlers(AsanOnSIGSEGV);
 
-  InitializeAllocator();
+  InitializeAllocator(common_flags()->allocator_may_return_null,
+                      flags()->quarantine_size);
 
   // On Linux AsanThread::ThreadStart() calls malloc() that's why asan_inited
   // should be set to 1 prior to initializing the threads.

Modified: compiler-rt/trunk/lib/lsan/lsan_allocator.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/lsan/lsan_allocator.cc?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/lsan/lsan_allocator.cc (original)
+++ compiler-rt/trunk/lib/lsan/lsan_allocator.cc Fri Dec 12 14:07:35 2014
@@ -47,7 +47,7 @@ static Allocator allocator;
 static THREADLOCAL AllocatorCache cache;
 
 void InitializeAllocator() {
-  allocator.Init();
+  allocator.Init(common_flags()->allocator_may_return_null);
 }
 
 void AllocatorThreadFinish() {

Modified: compiler-rt/trunk/lib/msan/msan.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/msan/msan.h?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/msan/msan.h (original)
+++ compiler-rt/trunk/lib/msan/msan.h Fri Dec 12 14:07:35 2014
@@ -125,6 +125,7 @@ char *GetProcSelfMaps();
 void InitializeInterceptors();
 
 void MsanAllocatorThreadFinish();
+void *MsanCalloc(StackTrace *stack, uptr nmemb, uptr size);
 void *MsanReallocate(StackTrace *stack, void *oldp, uptr size,
                      uptr alignment, bool zeroise);
 void MsanDeallocate(StackTrace *stack, void *ptr);

Modified: compiler-rt/trunk/lib/msan/msan_allocator.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/msan/msan_allocator.cc?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/msan/msan_allocator.cc (original)
+++ compiler-rt/trunk/lib/msan/msan_allocator.cc Fri Dec 12 14:07:35 2014
@@ -73,7 +73,7 @@ static inline void Init() {
   if (inited) return;
   __msan_init();
   inited = true;  // this must happen before any threads are created.
-  allocator.Init();
+  allocator.Init(common_flags()->allocator_may_return_null);
 }
 
 AllocatorCache *GetAllocatorCache(MsanThreadLocalMallocStorage *ms) {
@@ -92,7 +92,7 @@ static void *MsanAllocate(StackTrace *st
   if (size > kMaxAllowedMallocSize) {
     Report("WARNING: MemorySanitizer failed to allocate %p bytes\n",
            (void *)size);
-    return AllocatorReturnNull();
+    return allocator.ReturnNullOrDie();
   }
   MsanThread *t = GetCurrentThread();
   void *allocated;
@@ -147,6 +147,13 @@ void MsanDeallocate(StackTrace *stack, v
   }
 }
 
+void *MsanCalloc(StackTrace *stack, uptr nmemb, uptr size) {
+  Init();
+  if (CallocShouldReturnNullDueToOverflow(size, nmemb))
+    return allocator.ReturnNullOrDie();
+  return MsanReallocate(stack, 0, nmemb * size, sizeof(u64), true);
+}
+
 void *MsanReallocate(StackTrace *stack, void *old_p, uptr new_size,
                      uptr alignment, bool zeroise) {
   if (!old_p)

Modified: compiler-rt/trunk/lib/msan/msan_interceptors.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/msan/msan_interceptors.cc?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/msan/msan_interceptors.cc (original)
+++ compiler-rt/trunk/lib/msan/msan_interceptors.cc Fri Dec 12 14:07:35 2014
@@ -925,10 +925,8 @@ INTERCEPTOR(SSIZE_T, recvfrom, int fd, v
 }
 
 INTERCEPTOR(void *, calloc, SIZE_T nmemb, SIZE_T size) {
-  if (CallocShouldReturnNullDueToOverflow(size, nmemb))
-    return AllocatorReturnNull();
   GET_MALLOC_STACK_TRACE;
-  if (!msan_inited) {
+  if (UNLIKELY(!msan_inited)) {
     // Hack: dlsym calls calloc before REAL(calloc) is retrieved from dlsym.
     const SIZE_T kCallocPoolSize = 1024;
     static uptr calloc_memory_for_dlsym[kCallocPoolSize];
@@ -939,7 +937,7 @@ INTERCEPTOR(void *, calloc, SIZE_T nmemb
     CHECK(allocated < kCallocPoolSize);
     return mem;
   }
-  return MsanReallocate(&stack, 0, nmemb * size, sizeof(u64), true);
+  return MsanCalloc(&stack, nmemb, size);
 }
 
 INTERCEPTOR(void *, realloc, void *ptr, SIZE_T size) {

Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.cc?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.cc (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.cc Fri Dec 12 14:07:35 2014
@@ -14,7 +14,6 @@
 #include "sanitizer_allocator.h"
 #include "sanitizer_allocator_internal.h"
 #include "sanitizer_common.h"
-#include "sanitizer_flags.h"
 
 namespace __sanitizer {
 
@@ -61,7 +60,7 @@ InternalAllocator *internal_allocator()
     SpinMutexLock l(&internal_alloc_init_mu);
     if (atomic_load(&internal_allocator_initialized, memory_order_relaxed) ==
         0) {
-      internal_allocator_instance->Init();
+      internal_allocator_instance->Init(/* may_return_null*/ false);
       atomic_store(&internal_allocator_initialized, 1, memory_order_release);
     }
   }
@@ -140,14 +139,12 @@ bool CallocShouldReturnNullDueToOverflow
   return (max / size) < n;
 }
 
-void *AllocatorReturnNull() {
-  if (common_flags()->allocator_may_return_null)
-    return 0;
+void NORETURN ReportAllocatorCannotReturnNull() {
   Report("%s's allocator is terminating the process instead of returning 0\n",
          SanitizerToolName);
   Report("If you don't like this behavior set allocator_may_return_null=1\n");
   CHECK(0);
-  return 0;
+  Die();
 }
 
 }  // namespace __sanitizer
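
Several hunks in this revision (asan_calloc, MsanCalloc, user_calloc) route the
calloc overflow check through the allocator's ReturnNullOrDie(). As a quick
illustration of the (max / size) < n test visible in
CallocShouldReturnNullDueToOverflow above, here is a hedged standalone version;
the helper name CallocWouldOverflow and the surrounding code are illustrative
only, not part of the commit.

#include <cstdint>
#include <cstdio>

// Returns true if n * size would overflow, i.e. calloc(n, size) must not
// attempt the allocation. Mirrors the (max / size) < n test from
// CallocShouldReturnNullDueToOverflow in sanitizer_allocator.cc.
static bool CallocWouldOverflow(std::uintptr_t size, std::uintptr_t n) {
  if (size == 0) return false;  // 0 * n never overflows
  std::uintptr_t max = static_cast<std::uintptr_t>(-1);
  return (max / size) < n;
}

int main() {
  std::printf("%d\n", CallocWouldOverflow(8, 1000));                          // 0: fits
  std::printf("%d\n", CallocWouldOverflow(1u << 20,
                                          static_cast<std::uintptr_t>(-1)));  // 1: overflows
  return 0;
}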

Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.h?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.h (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_allocator.h Fri Dec 12 14:07:35 2014
@@ -23,8 +23,8 @@
 
 namespace __sanitizer {
 
-// Depending on allocator_may_return_null either return 0 or crash.
-void *AllocatorReturnNull();
+// Prints error message and kills the program.
+void NORETURN ReportAllocatorCannotReturnNull();
 
 // SizeClassMap maps allocation sizes into size classes and back.
 // Class 0 corresponds to size 0.
@@ -1002,9 +1002,10 @@ struct SizeClassAllocatorLocalCache {
 template <class MapUnmapCallback = NoOpMapUnmapCallback>
 class LargeMmapAllocator {
  public:
-  void Init() {
+  void Init(bool may_return_null) {
     internal_memset(this, 0, sizeof(*this));
     page_size_ = GetPageSizeCached();
+    atomic_store(&may_return_null_, may_return_null, memory_order_relaxed);
   }
 
   void *Allocate(AllocatorStats *stat, uptr size, uptr alignment) {
@@ -1012,7 +1013,9 @@ class LargeMmapAllocator {
     uptr map_size = RoundUpMapSize(size);
     if (alignment > page_size_)
       map_size += alignment;
-    if (map_size < size) return AllocatorReturnNull();  // Overflow.
+    // Overflow.
+    if (map_size < size)
+      return ReturnNullOrDie();
     uptr map_beg = reinterpret_cast<uptr>(
         MmapOrDie(map_size, "LargeMmapAllocator"));
     CHECK(IsAligned(map_beg, page_size_));
@@ -1048,6 +1051,16 @@ class LargeMmapAllocator {
     return reinterpret_cast<void*>(res);
   }
 
+  void *ReturnNullOrDie() {
+    if (atomic_load(&may_return_null_, memory_order_acquire))
+      return 0;
+    ReportAllocatorCannotReturnNull();
+  }
+
+  void SetMayReturnNull(bool may_return_null) {
+    atomic_store(&may_return_null_, may_return_null, memory_order_release);
+  }
+
   void Deallocate(AllocatorStats *stat, void *p) {
     Header *h = GetHeader(p);
     {
@@ -1226,6 +1239,7 @@ class LargeMmapAllocator {
   struct Stats {
     uptr n_allocs, n_frees, currently_allocated, max_allocated, by_size_log[64];
   } stats;
+  atomic_uint8_t may_return_null_;
   SpinMutex mutex_;
 };
 
@@ -1239,10 +1253,11 @@ template <class PrimaryAllocator, class
           class SecondaryAllocator>  // NOLINT
 class CombinedAllocator {
  public:
-  void Init() {
+  void Init(bool may_return_null) {
     primary_.Init();
-    secondary_.Init();
+    secondary_.Init(may_return_null);
     stats_.Init();
+    atomic_store(&may_return_null_, may_return_null, memory_order_relaxed);
   }
 
   void *Allocate(AllocatorCache *cache, uptr size, uptr alignment,
@@ -1251,7 +1266,7 @@ class CombinedAllocator {
     if (size == 0)
       size = 1;
     if (size + alignment < size)
-      return AllocatorReturnNull();
+      return ReturnNullOrDie();
     if (alignment > 8)
       size = RoundUpTo(size, alignment);
     void *res;
@@ -1267,6 +1282,17 @@ class CombinedAllocator {
     return res;
   }
 
+  void *ReturnNullOrDie() {
+    if (atomic_load(&may_return_null_, memory_order_acquire))
+      return 0;
+    ReportAllocatorCannotReturnNull();
+  }
+
+  void SetMayReturnNull(bool may_return_null) {
+    secondary_.SetMayReturnNull(may_return_null);
+    atomic_store(&may_return_null_, may_return_null, memory_order_release);
+  }
+
   void Deallocate(AllocatorCache *cache, void *p) {
     if (!p) return;
     if (primary_.PointerIsMine(p))
@@ -1379,6 +1405,7 @@ class CombinedAllocator {
   PrimaryAllocator primary_;
   SecondaryAllocator secondary_;
   AllocatorGlobalStats stats_;
+  atomic_uint8_t may_return_null_;
 };
 
 // Returns true if calloc(size, n) should return 0 due to overflow in size*n.

Modified: compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_allocator_test.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_allocator_test.cc?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_allocator_test.cc (original)
+++ compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_allocator_test.cc Fri Dec 12 14:07:35 2014
@@ -14,7 +14,6 @@
 #include "sanitizer_common/sanitizer_allocator.h"
 #include "sanitizer_common/sanitizer_allocator_internal.h"
 #include "sanitizer_common/sanitizer_common.h"
-#include "sanitizer_common/sanitizer_flags.h"
 
 #include "sanitizer_test_utils.h"
 #include "sanitizer_pthread_wrappers.h"
@@ -299,7 +298,7 @@ TEST(SanitizerCommon, LargeMmapAllocator
   TestMapUnmapCallback::map_count = 0;
   TestMapUnmapCallback::unmap_count = 0;
   LargeMmapAllocator<TestMapUnmapCallback> a;
-  a.Init();
+  a.Init(/* may_return_null */ false);
   AllocatorStats stats;
   stats.Init();
   void *x = a.Allocate(&stats, 1 << 20, 1);
@@ -333,7 +332,7 @@ TEST(SanitizerCommon, SizeClassAllocator
 #if !defined(_WIN32)  // FIXME: This currently fails on Windows.
 TEST(SanitizerCommon, LargeMmapAllocator) {
   LargeMmapAllocator<> a;
-  a.Init();
+  a.Init(/* may_return_null */ false);
   AllocatorStats stats;
   stats.Init();
 
@@ -415,25 +414,22 @@ void TestCombinedAllocator() {
       CombinedAllocator<PrimaryAllocator, AllocatorCache, SecondaryAllocator>
       Allocator;
   Allocator *a = new Allocator;
-  a->Init();
+  a->Init(/* may_return_null */ true);
 
   AllocatorCache cache;
   memset(&cache, 0, sizeof(cache));
   a->InitCache(&cache);
 
-  bool allocator_may_return_null = common_flags()->allocator_may_return_null;
-  common_flags()->allocator_may_return_null = true;
   EXPECT_EQ(a->Allocate(&cache, -1, 1), (void*)0);
   EXPECT_EQ(a->Allocate(&cache, -1, 1024), (void*)0);
   EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1), (void*)0);
   EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1024, 1024), (void*)0);
   EXPECT_EQ(a->Allocate(&cache, (uptr)-1 - 1023, 1024), (void*)0);
 
-  common_flags()->allocator_may_return_null = false;
+  // Set to false
+  a->SetMayReturnNull(false);
   EXPECT_DEATH(a->Allocate(&cache, -1, 1),
                "allocator is terminating the process");
-  // Restore the original value.
-  common_flags()->allocator_may_return_null = allocator_may_return_null;
 
   const uptr kNumAllocs = 100000;
   const uptr kNumIter = 10;
@@ -708,7 +704,7 @@ TEST(SanitizerCommon, SizeClassAllocator
 
 TEST(SanitizerCommon, LargeMmapAllocatorIteration) {
   LargeMmapAllocator<> a;
-  a.Init();
+  a.Init(/* may_return_null */ false);
   AllocatorStats stats;
   stats.Init();
 
@@ -735,7 +731,7 @@ TEST(SanitizerCommon, LargeMmapAllocator
 
 TEST(SanitizerCommon, LargeMmapAllocatorBlockBegin) {
   LargeMmapAllocator<> a;
-  a.Init();
+  a.Init(/* may_return_null */ false);
   AllocatorStats stats;
   stats.Init();
 

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_interceptors.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_interceptors.cc?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_interceptors.cc (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_interceptors.cc Fri Dec 12 14:07:35 2014
@@ -505,14 +505,10 @@ TSAN_INTERCEPTOR(void*, __libc_memalign,
 TSAN_INTERCEPTOR(void*, calloc, uptr size, uptr n) {
   if (cur_thread()->in_symbolizer)
     return __libc_calloc(size, n);
-  if (__sanitizer::CallocShouldReturnNullDueToOverflow(size, n))
-    return AllocatorReturnNull();
   void *p = 0;
   {
     SCOPED_INTERCEPTOR_RAW(calloc, size, n);
-    p = user_alloc(thr, pc, n * size);
-    if (p)
-      internal_memset(p, 0, n * size);
+    p = user_calloc(thr, pc, size, n);
   }
   invoke_malloc_hook(p, n * size);
   return p;

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_mman.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_mman.cc?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_mman.cc (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_mman.cc Fri Dec 12 14:07:35 2014
@@ -45,7 +45,7 @@ Allocator *allocator() {
 }
 
 void InitializeAllocator() {
-  allocator()->Init();
+  allocator()->Init(common_flags()->allocator_may_return_null);
 }
 
 void AllocatorThreadStart(ThreadState *thr) {
@@ -78,7 +78,7 @@ static void SignalUnsafeCall(ThreadState
 
 void *user_alloc(ThreadState *thr, uptr pc, uptr sz, uptr align, bool signal) {
   if ((sz >= (1ull << 40)) || (align >= (1ull << 40)))
-    return AllocatorReturnNull();
+    return allocator()->ReturnNullOrDie();
   void *p = allocator()->Allocate(&thr->alloc_cache, sz, align);
   if (p == 0)
     return 0;
@@ -89,6 +89,15 @@ void *user_alloc(ThreadState *thr, uptr
   return p;
 }
 
+void *user_calloc(ThreadState *thr, uptr pc, uptr size, uptr n) {
+  if (CallocShouldReturnNullDueToOverflow(size, n))
+    return allocator()->ReturnNullOrDie();
+  void *p = user_alloc(thr, pc, n * size);
+  if (p)
+    internal_memset(p, 0, n * size);
+  return p;
+}
+
 void user_free(ThreadState *thr, uptr pc, void *p, bool signal) {
   if (ctx && ctx->initialized)
     OnUserFree(thr, pc, (uptr)p, true);

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_mman.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_mman.h?rev=224148&r1=224147&r2=224148&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_mman.h (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_mman.h Fri Dec 12 14:07:35 2014
@@ -27,6 +27,7 @@ void AllocatorPrintStats();
 // For user allocations.
 void *user_alloc(ThreadState *thr, uptr pc, uptr sz,
                  uptr align = kDefaultAlignment, bool signal = true);
+void *user_calloc(ThreadState *thr, uptr pc, uptr sz, uptr n);
 // Does not accept NULL.
 void user_free(ThreadState *thr, uptr pc, void *p, bool signal = true);
 void *user_realloc(ThreadState *thr, uptr pc, void *p, uptr sz);
