[PATCH] [lsan] Use the fast version of GetBlockBegin for leak checking in LSan and ASan.

Sergey Matveev earthdok at google.com
Fri May 31 03:54:39 PDT 2013


Hi kcc,

http://llvm-reviews.chandlerc.com/D899

Files:
  lib/asan/asan_allocator2.cc
  lib/lsan/lsan_allocator.cc
  lib/sanitizer_common/sanitizer_allocator.h
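
For context, a minimal sketch (not part of the patch) of the call pattern this speeds up: during a leak check every candidate pointer found while scanning memory is resolved to its owning heap block via PointsIntoChunk(), so the block-begin lookup sits on the hot path. The scan runs with all other threads suspended and the allocator locked, which is what allows the fast locked lookup instead of the general-purpose GetBlockBegin(). The scanner name below is illustrative; the real one in lsan_common.cc is more involved.

  // Sketch only. Precondition: all other threads are suspended and the
  // allocator is locked, so the GetBlockBeginFastLocked()-based
  // PointsIntoChunk() is safe to call here.
  static void ScanRangeForPointersSketch(uptr begin, uptr end) {
    for (uptr pp = begin; pp + sizeof(void *) <= end; pp += sizeof(void *)) {
      void *p = *reinterpret_cast<void **>(pp);
      // Resolves p to the user-visible start of a live chunk, or returns 0.
      void *chunk = PointsIntoChunk(p);
      if (!chunk) continue;
      // ... mark the chunk reachable and push it on the scan frontier ...
    }
  }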

Index: lib/asan/asan_allocator2.cc
===================================================================
--- lib/asan/asan_allocator2.cc
+++ lib/asan/asan_allocator2.cc
@@ -528,23 +528,22 @@
   return new_ptr;
 }
 
-static AsanChunk *GetAsanChunkByAddr(uptr p) {
-  void *ptr = reinterpret_cast<void *>(p);
-  uptr alloc_beg = reinterpret_cast<uptr>(allocator.GetBlockBegin(ptr));
+// Assumes alloc_beg == allocator.GetBlockBegin(alloc_beg).
+static AsanChunk *GetAsanChunk(void *alloc_beg) {
   if (!alloc_beg) return 0;
   uptr *memalign_magic = reinterpret_cast<uptr *>(alloc_beg);
   if (memalign_magic[0] == kMemalignMagic) {
     AsanChunk *m = reinterpret_cast<AsanChunk *>(memalign_magic[1]);
     CHECK(m->from_memalign);
     return m;
   }
-  if (!allocator.FromPrimary(ptr)) {
-    uptr *meta = reinterpret_cast<uptr *>(
-        allocator.GetMetaData(reinterpret_cast<void *>(alloc_beg)));
+  if (!allocator.FromPrimary(alloc_beg)) {
+    uptr *meta = reinterpret_cast<uptr *>(allocator.GetMetaData(alloc_beg));
     AsanChunk *m = reinterpret_cast<AsanChunk *>(meta[1]);
     return m;
   }
-  uptr actual_size = allocator.GetActuallyAllocatedSize(ptr);
+  uptr actual_size =
+      allocator.GetActuallyAllocatedSize(alloc_beg);
   CHECK_LE(actual_size, SizeClassMap::kMaxSize);
   // We know the actually allocated size, but we don't know the redzone size.
   // Just try all possible redzone sizes.
@@ -554,11 +553,23 @@
     if (ComputeRZLog(max_possible_size) != rz_log)
       continue;
     return reinterpret_cast<AsanChunk *>(
-        alloc_beg + rz_size - kChunkHeaderSize);
+        reinterpret_cast<uptr>(alloc_beg) + rz_size - kChunkHeaderSize);
   }
   return 0;
 }
 
+static AsanChunk *GetAsanChunkByAddr(uptr p) {
+  void *alloc_beg = allocator.GetBlockBegin(reinterpret_cast<void *>(p));
+  return GetAsanChunk(alloc_beg);
+}
+
+// Allocator must be locked when this function is called.
+static AsanChunk *GetAsanChunkByAddrFastLocked(uptr p) {
+  void *alloc_beg =
+      allocator.GetBlockBeginFastLocked(reinterpret_cast<void *>(p));
+  return GetAsanChunk(alloc_beg);
+}
+
 static uptr AllocationSize(uptr p) {
   AsanChunk *m = GetAsanChunkByAddr(p);
   if (!m) return 0;
@@ -721,16 +732,17 @@
 
 void *PointsIntoChunk(void* p) {
   uptr addr = reinterpret_cast<uptr>(p);
-  __asan::AsanChunk *m = __asan::GetAsanChunkByAddr(addr);
+  __asan::AsanChunk *m = __asan::GetAsanChunkByAddrFastLocked(addr);
   if (!m) return 0;
   uptr chunk = m->Beg();
   if ((m->chunk_state == __asan::CHUNK_ALLOCATED) && m->AddrIsInside(addr))
     return reinterpret_cast<void *>(chunk);
   return 0;
 }
 
 void *GetUserBegin(void *p) {
-  __asan::AsanChunk *m = __asan::GetAsanChunkByAddr(reinterpret_cast<uptr>(p));
+  __asan::AsanChunk *m =
+      __asan::GetAsanChunkByAddrFastLocked(reinterpret_cast<uptr>(p));
   CHECK(m);
   return reinterpret_cast<void *>(m->Beg());
 }
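
A note on why the FastLocked variant is safe in PointsIntoChunk()/GetUserBegin() (descriptive only, nothing beyond what the patch relies on): these entry points are reached only from the leak checker, which locks the allocator for the whole scan. Roughly, assuming the existing __lsan::LockAllocator()/UnlockAllocator() hooks, which are expected to wrap allocator.ForceLock()/ForceUnlock():

  // Hypothetical driver name, shown only to spell out the lock discipline
  // that the *FastLocked path above relies on.
  static void LeakCheckSketch() {
    __lsan::LockAllocator();    // assumed to call allocator.ForceLock()
    // ... stop the world, then scan, calling PointsIntoChunk() and
    //     GetUserBegin() on candidate pointers ...
    __lsan::UnlockAllocator();  // assumed to call allocator.ForceUnlock()
  }
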
Index: lib/lsan/lsan_allocator.cc
===================================================================
--- lib/lsan/lsan_allocator.cc
+++ lib/lsan/lsan_allocator.cc
@@ -133,8 +133,7 @@
 }
 
 void *PointsIntoChunk(void* p) {
-  if (!allocator.PointerIsMine(p)) return 0;
-  void *chunk = allocator.GetBlockBegin(p);
+  void *chunk = allocator.GetBlockBeginFastLocked(p);
   if (!chunk) return 0;
   // LargeMmapAllocator considers pointers to the meta-region of a chunk to be
   // valid, but we don't want that.
Index: lib/sanitizer_common/sanitizer_allocator.h
===================================================================
--- lib/sanitizer_common/sanitizer_allocator.h
+++ lib/sanitizer_common/sanitizer_allocator.h
@@ -1043,10 +1043,9 @@
     return GetUser(h);
   }
 
-  // This function does the same as GetBlockBegin, but much faster.
-  // It may be called only in a single-threaded context, e.g. when all other
-  // threads are suspended or joined.
-  void *GetBlockBeginFastSingleThreaded(void *ptr) {
+  // This function does the same as GetBlockBegin, but is much faster.
+  // Must be called with the allocator locked.
+  void *GetBlockBeginFastLocked(void *ptr) {
     uptr p = reinterpret_cast<uptr>(ptr);
     uptr n = n_chunks_;
     if (!n) return 0;
@@ -1238,6 +1237,14 @@
     return secondary_.GetBlockBegin(p);
   }
 
+  // This function does the same as GetBlockBegin, but is much faster.
+  // Must be called with the allocator locked.
+  void *GetBlockBeginFastLocked(void *p) {
+    if (primary_.PointerIsMine(p))
+      return primary_.GetBlockBegin(p);
+    return secondary_.GetBlockBeginFastLocked(p);
+  }
+
   uptr GetActuallyAllocatedSize(void *p) {
     if (primary_.PointerIsMine(p))
       return primary_.GetActuallyAllocatedSize(p);
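
To make the contract of the renamed entry point concrete (illustrative usage, not part of the patch): on the combined allocator the primary lookup is already cheap size-class arithmetic and needs no lock, while the secondary has to walk its chunk list, which is only safe while the allocator lock is held. Here `Allocator` stands in for the tool-specific combined allocator instantiation:

  // Illustrative only.
  void *LookupBlockSketch(Allocator *a, void *p) {
    // General-purpose lookup, usable without external locking.
    void *slow = a->GetBlockBegin(p);

    // Fast lookup; valid only while the allocator is locked, e.g. via
    // ForceLock() with all other threads suspended.
    a->ForceLock();
    void *fast = a->GetBlockBeginFastLocked(p);
    a->ForceUnlock();

    // With no concurrent frees both lookups return the same block start.
    CHECK_EQ(reinterpret_cast<uptr>(slow), reinterpret_cast<uptr>(fast));
    return fast;
  }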