[compiler-rt] r183099 - [lsan] Add __lsan_disable() and __lsan_enable().

Sergey Matveev earthdok at google.com
Mon Jun 3 04:21:35 PDT 2013


Author: smatveev
Date: Mon Jun  3 06:21:34 2013
New Revision: 183099

URL: http://llvm.org/viewvc/llvm-project?rev=183099&view=rev
Log:
[lsan] Add __lsan_disable() and __lsan_enable().

Objects allocated between a call to __lsan_disable() and the matching
call to __lsan_enable() will be treated as live memory. Also add a
ScopedDisabler RAII helper.
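
For illustration, a minimal sketch of how client code might use the new
interface (the deliberate leak is hypothetical; only the __lsan_*
functions and ScopedDisabler come from this patch):

  #include <stdlib.h>
  #include <sanitizer/lsan_interface.h>

  void LeakOnPurpose() {
    __lsan::ScopedDisabler disabler;
    // The pointer is deliberately dropped. Normally this would be a
    // direct leak; under the disabler the chunk is tagged kSuppressed
    // and treated as live memory.
    malloc(4096);
  }

  int main() {
    LeakOnPurpose();  // not reported
    malloc(1337);     // reported as directly leaked at exit
    return 0;
  }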

Added:
    compiler-rt/trunk/include/sanitizer/lsan_interface.h
    compiler-rt/trunk/lib/lsan/lit_tests/TestCases/disabler.cc
Modified:
    compiler-rt/trunk/lib/asan/asan_allocator2.cc
    compiler-rt/trunk/lib/asan/asan_thread.cc
    compiler-rt/trunk/lib/asan/asan_thread.h
    compiler-rt/trunk/lib/lsan/lsan_allocator.cc
    compiler-rt/trunk/lib/lsan/lsan_common.cc
    compiler-rt/trunk/lib/lsan/lsan_common.h

Added: compiler-rt/trunk/include/sanitizer/lsan_interface.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/include/sanitizer/lsan_interface.h?rev=183099&view=auto
==============================================================================
--- compiler-rt/trunk/include/sanitizer/lsan_interface.h (added)
+++ compiler-rt/trunk/include/sanitizer/lsan_interface.h Mon Jun  3 06:21:34 2013
@@ -0,0 +1,39 @@
+//===-- sanitizer/lsan_interface.h ------------------------------*- C++ -*-===//
+//
+//                     The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file is a part of LeakSanitizer.
+//
+// Public interface header.
+//===----------------------------------------------------------------------===//
+#ifndef SANITIZER_LSAN_INTERFACE_H
+#define SANITIZER_LSAN_INTERFACE_H
+
+#include <sanitizer/common_interface_defs.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+  // Allocations made between calls to __lsan_disable() and __lsan_enable() will
+  // be treated as non-leaks. Disable/enable pairs can be nested.
+  void __lsan_disable();
+  void __lsan_enable();
+
+#ifdef __cplusplus
+}  // extern "C"
+
+namespace __lsan {
+class ScopedDisabler {
+ public:
+  ScopedDisabler() { __lsan_disable(); }
+  ~ScopedDisabler() { __lsan_enable(); }
+};
+}  // namespace __lsan
+#endif
+
+#endif  // SANITIZER_LSAN_INTERFACE_H
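
As the comment in the header says, disable/enable pairs can be nested;
both runtimes keep a per-thread counter, so allocations stay suppressed
until every __lsan_disable() has been matched. A sketch of the intended
semantics (illustrative, not taken from the patch):

  #include <stdlib.h>
  #include <sanitizer/lsan_interface.h>

  void Nested() {
    __lsan_disable();  // counter: 0 -> 1
    __lsan_disable();  // counter: 1 -> 2
    __lsan_enable();   // counter: 2 -> 1, suppression still active
    malloc(16);        // tagged kSuppressed: counter is nonzero
    __lsan_enable();   // counter: 1 -> 0, suppression ends
    malloc(32);        // tagged kDirectlyLeaked if the pointer is dropped
  }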

Modified: compiler-rt/trunk/lib/asan/asan_allocator2.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_allocator2.cc?rev=183099&r1=183098&r2=183099&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_allocator2.cc (original)
+++ compiler-rt/trunk/lib/asan/asan_allocator2.cc Mon Jun  3 06:21:34 2013
@@ -422,6 +422,8 @@ static void *Allocate(uptr size, uptr al
     uptr fill_size = Min(size, (uptr)fl.max_malloc_fill_size);
     REAL(memset)(res, fl.malloc_fill_byte, fill_size);
   }
+  m->lsan_tag =
+      t->lsan_disabled() ? __lsan::kSuppressed : __lsan::kDirectlyLeaked;
   // Must be the last mutation of metadata in this function.
   atomic_store((atomic_uint8_t *)m, CHUNK_ALLOCATED, memory_order_release);
   ASAN_MALLOC_HOOK(res, size);
@@ -787,10 +789,27 @@ template void ForEachChunk<PrintLeakedCb
 template void ForEachChunk<CollectLeaksCb>(CollectLeaksCb const &callback);
 template void ForEachChunk<MarkIndirectlyLeakedCb>(
     MarkIndirectlyLeakedCb const &callback);
-template void ForEachChunk<ClearTagCb>(ClearTagCb const &callback);
+template void ForEachChunk<CollectSuppressedCb>(
+    CollectSuppressedCb const &callback);
 #endif  // CAN_SANITIZE_LEAKS
 }  // namespace __lsan
 
+extern "C" {
+void __lsan_disable() {
+  __asan_init();
+  __asan::AsanThread *t = __asan::GetCurrentThread();
+  CHECK(t);
+  t->disable_lsan();
+}
+
+void __lsan_enable() {
+  __asan_init();
+  __asan::AsanThread *t = __asan::GetCurrentThread();
+  CHECK(t);
+  t->enable_lsan();
+}
+}  // extern "C"
+
 // ---------------------- Interface ---------------- {{{1
 using namespace __asan;  // NOLINT
 

Modified: compiler-rt/trunk/lib/asan/asan_thread.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_thread.cc?rev=183099&r1=183098&r2=183099&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_thread.cc (original)
+++ compiler-rt/trunk/lib/asan/asan_thread.cc Mon Jun  3 06:21:34 2013
@@ -108,6 +108,7 @@ void AsanThread::Destroy() {
 
 void AsanThread::Init() {
   SetThreadStackAndTls();
+  lsan_disabled_ = 0;
   CHECK(AddrIsInMem(stack_bottom_));
   CHECK(AddrIsInMem(stack_top_ - 1));
   ClearShadowForThreadStackAndTLS();

Modified: compiler-rt/trunk/lib/asan/asan_thread.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_thread.h?rev=183099&r1=183098&r2=183099&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_thread.h (original)
+++ compiler-rt/trunk/lib/asan/asan_thread.h Mon Jun  3 06:21:34 2013
@@ -65,6 +65,15 @@ class AsanThread {
   uptr stack_size() { return stack_top_ - stack_bottom_; }
   uptr tls_begin() { return tls_begin_; }
   uptr tls_end() { return tls_end_; }
+  uptr lsan_disabled() { return lsan_disabled_; }
+  void disable_lsan() { lsan_disabled_++; }
+  void enable_lsan() {
+    if (!lsan_disabled_) {
+      Report("Unmatched call to __lsan_enable().\n");
+      Die();
+    }
+    lsan_disabled_--;
+  }
   u32 tid() { return context_->tid; }
   AsanThreadContext *context() { return context_; }
   void set_context(AsanThreadContext *context) { context_ = context; }
@@ -90,6 +99,7 @@ class AsanThread {
   uptr  stack_bottom_;
   uptr tls_begin_;
   uptr tls_end_;
+  uptr lsan_disabled_;
 
   FakeStack fake_stack_;
   AsanThreadLocalMallocStorage malloc_storage_;
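
As enable_lsan() above shows (and the standalone runtime below mirrors),
an __lsan_enable() with no matching __lsan_disable() is a hard error:
the counter is zero, so the runtime reports the mismatch and calls
Die(). An illustrative repro, not from the patch:

  #include <sanitizer/lsan_interface.h>

  int main() {
    __lsan_enable();  // counter is 0: prints
                      // "Unmatched call to __lsan_enable()." and aborts
    return 0;
  }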

Added: compiler-rt/trunk/lib/lsan/lit_tests/TestCases/disabler.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/lsan/lit_tests/TestCases/disabler.cc?rev=183099&view=auto
==============================================================================
--- compiler-rt/trunk/lib/lsan/lit_tests/TestCases/disabler.cc (added)
+++ compiler-rt/trunk/lib/lsan/lit_tests/TestCases/disabler.cc Mon Jun  3 06:21:34 2013
@@ -0,0 +1,23 @@
+// Test for ScopedDisabler.
+// RUN: LSAN_BASE="report_blocks=1:use_registers=0:use_stacks=0:use_globals=0:use_tls=0"
+// RUN: %clangxx_lsan -I %p/../../../../include %s -o %t
+// RUN: LSAN_OPTIONS=$LSAN_BASE %t 2>&1 | FileCheck %s
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "sanitizer/lsan_interface.h"
+
+int main() {
+  void **p;
+  {
+    __lsan::ScopedDisabler d;
+    p = new void *;
+  }
+  *reinterpret_cast<void **>(p) = malloc(666);
+  void *q = malloc(1337);
+  // Break optimization.
+  fprintf(stderr, "Test alloc: %p.\n", q);
+  return 0;
+}
+// CHECK: SUMMARY: LeakSanitizer: 1337 byte(s) leaked in 1 allocation(s)
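
The 666-byte block deserves a note: it is never reported because it is
referenced only from the suppressed chunk *p, and the new kSuppressed
flood fill in ClassifyAllChunks propagates suppression through it; only
q's 1337 bytes remain as a direct leak. Since ScopedDisabler is a plain
RAII wrapper, the scope in the test could equivalently be written with
explicit calls:

  __lsan_disable();
  p = new void *;
  __lsan_enable();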

Modified: compiler-rt/trunk/lib/lsan/lsan_allocator.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/lsan/lsan_allocator.cc?rev=183099&r1=183098&r2=183099&view=diff
==============================================================================
--- compiler-rt/trunk/lib/lsan/lsan_allocator.cc (original)
+++ compiler-rt/trunk/lib/lsan/lsan_allocator.cc Mon Jun  3 06:21:34 2013
@@ -44,6 +44,8 @@ typedef CombinedAllocator<PrimaryAllocat
 
 static Allocator allocator;
 static THREADLOCAL AllocatorCache cache;
+// All allocations made while this is > 0 will be treated as non-leaks.
+static THREADLOCAL uptr lsan_disabled;
 
 void InitializeAllocator() {
   allocator.Init();
@@ -61,6 +63,7 @@ static void RegisterAllocation(const Sta
   if (!p) return;
   ChunkMetadata *m = Metadata(p);
   CHECK(m);
+  m->tag = lsan_disabled ? kSuppressed : kDirectlyLeaked;
   m->stack_trace_id = StackDepotPut(stack.trace, stack.size);
   m->requested_size = size;
   atomic_store((atomic_uint8_t*)m, 1, memory_order_relaxed);
@@ -185,5 +188,21 @@ template void ForEachChunk<PrintLeakedCb
 template void ForEachChunk<CollectLeaksCb>(CollectLeaksCb const &callback);
 template void ForEachChunk<MarkIndirectlyLeakedCb>(
     MarkIndirectlyLeakedCb const &callback);
-template void ForEachChunk<ClearTagCb>(ClearTagCb const &callback);
+template void ForEachChunk<CollectSuppressedCb>(
+    CollectSuppressedCb const &callback);
 }  // namespace __lsan
+
+extern "C" {
+void __lsan_disable() {
+  __lsan::lsan_disabled++;
+}
+
+void __lsan_enable() {
+  if (!__lsan::lsan_disabled) {
+    Report("Unmatched call to __lsan_enable().\n");
+    Die();
+  }
+  __lsan::lsan_disabled--;
+}
+}  // extern "C"
+
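
Since the counter is THREADLOCAL, disabling leak detection in one thread
should not affect allocations made in another. A sketch of the behavior
this implies (an inference from the thread-local counter, not a test
from the patch), using POSIX threads:

  #include <pthread.h>
  #include <stdlib.h>
  #include <sanitizer/lsan_interface.h>

  static void *Worker(void *) {
    malloc(42);  // worker's counter is 0: tagged kDirectlyLeaked
    return 0;
  }

  int main() {
    __lsan_disable();              // bumps only this thread's counter
    pthread_t t;
    pthread_create(&t, 0, Worker, 0);
    pthread_join(t, 0);
    malloc(16);                    // main thread: tagged kSuppressed
    __lsan_enable();
    return 0;
  }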

Modified: compiler-rt/trunk/lib/lsan/lsan_common.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/lsan/lsan_common.cc?rev=183099&r1=183098&r2=183099&view=diff
==============================================================================
--- compiler-rt/trunk/lib/lsan/lsan_common.cc (original)
+++ compiler-rt/trunk/lib/lsan/lsan_common.cc Mon Jun  3 06:21:34 2013
@@ -78,8 +78,8 @@ static inline bool CanBeAHeapPointer(upt
 
 // Scan the memory range, looking for byte patterns that point into allocator
 // chunks. Mark those chunks with tag and add them to the frontier.
-// There are two usage modes for this function: finding non-leaked chunks
-// (tag = kReachable) and finding indirectly leaked chunks
+// There are two usage modes for this function: finding reachable or suppressed
+// chunks (tag = kReachable or kSuppressed) and finding indirectly leaked chunks
 // (tag = kIndirectlyLeaked). In the second case, there's no flood fill,
 // so frontier = 0.
 void ScanRangeForPointers(uptr begin, uptr end, InternalVector<uptr> *frontier,
@@ -93,12 +93,12 @@ void ScanRangeForPointers(uptr begin, up
   for (; pp + sizeof(uptr) <= end; pp += alignment) {
     void *p = *reinterpret_cast<void**>(pp);
     if (!CanBeAHeapPointer(reinterpret_cast<uptr>(p))) continue;
-    // FIXME: PointsIntoChunk is SLOW because GetBlockBegin() in
-    // LargeMmapAllocator involves a lock and a linear search.
     void *chunk = PointsIntoChunk(p);
     if (!chunk) continue;
     LsanMetadata m(chunk);
+    // Reachable beats suppressed beats leaked.
     if (m.tag() == kReachable) continue;
+    if (m.tag() == kSuppressed && tag != kReachable) continue;
     m.set_tag(tag);
     if (flags()->log_pointers)
       Report("%p: found %p pointing into chunk %p-%p of size %llu.\n", pp, p,
@@ -178,13 +178,13 @@ static void ProcessThreads(SuspendedThre
   }
 }
 
-static void FloodFillReachable(InternalVector<uptr> *frontier) {
+static void FloodFillTag(InternalVector<uptr> *frontier, ChunkTag tag) {
   while (frontier->size()) {
     uptr next_chunk = frontier->back();
     frontier->pop_back();
     LsanMetadata m(reinterpret_cast<void *>(next_chunk));
     ScanRangeForPointers(next_chunk, next_chunk + m.requested_size(), frontier,
-                         "HEAP", kReachable);
+                         "HEAP", tag);
   }
 }
 
@@ -199,6 +199,13 @@ void MarkIndirectlyLeakedCb::operator()(
   }
 }
 
+void CollectSuppressedCb::operator()(void *p) const {
+  p = GetUserBegin(p);
+  LsanMetadata m(p);
+  if (m.allocated() && m.tag() == kSuppressed)
+    frontier_->push_back(reinterpret_cast<uptr>(p));
+}
+
 // Set the appropriate tag on each chunk.
 static void ClassifyAllChunks(SuspendedThreadsList const &suspended_threads) {
   // Holds the flood fill frontier.
@@ -207,21 +214,24 @@ static void ClassifyAllChunks(SuspendedT
   if (flags()->use_globals)
     ProcessGlobalRegions(&frontier);
   ProcessThreads(suspended_threads, &frontier);
-  FloodFillReachable(&frontier);
+  FloodFillTag(&frontier, kReachable);
+  // The check here is relatively expensive, so we do this in a separate flood
+  // fill. That way we can skip the check for chunks that are reachable
+  // otherwise.
   ProcessPlatformSpecificAllocations(&frontier);
-  FloodFillReachable(&frontier);
+  FloodFillTag(&frontier, kReachable);
 
-  // Now all reachable chunks are marked. Iterate over leaked chunks and mark
-  // those that are reachable from other leaked chunks.
   if (flags()->log_pointers)
-    Report("Now scanning leaked blocks for pointers.\n");
-  ForEachChunk(MarkIndirectlyLeakedCb());
-}
+    Report("Scanning suppressed blocks.\n");
+  CHECK_EQ(0, frontier.size());
+  ForEachChunk(CollectSuppressedCb(&frontier));
+  FloodFillTag(&frontier, kSuppressed);
 
-void ClearTagCb::operator()(void *p) const {
-  p = GetUserBegin(p);
-  LsanMetadata m(p);
-  m.set_tag(kDirectlyLeaked);
+  // Iterate over leaked chunks and mark those that are reachable from other
+  // leaked chunks.
+  if (flags()->log_pointers)
+    Report("Scanning leaked blocks.\n");
+  ForEachChunk(MarkIndirectlyLeakedCb());
 }
 
 static void PrintStackTraceById(u32 stack_trace_id) {
@@ -232,21 +242,11 @@ static void PrintStackTraceById(u32 stac
                          common_flags()->strip_path_prefix, 0);
 }
 
-static void LockAndSuspendThreads(StopTheWorldCallback callback, void *arg) {
-  LockThreadRegistry();
-  LockAllocator();
-  StopTheWorld(callback, arg);
-  UnlockAllocator();
-  UnlockThreadRegistry();
-}
-
-///// Normal leak checking. /////
-
 void CollectLeaksCb::operator()(void *p) const {
   p = GetUserBegin(p);
   LsanMetadata m(p);
   if (!m.allocated()) return;
-  if (m.tag() != kReachable) {
+  if (m.tag() == kDirectlyLeaked || m.tag() == kIndirectlyLeaked) {
     uptr resolution = flags()->resolution;
     if (resolution > 0) {
       uptr size = 0;
@@ -268,8 +268,7 @@ void PrintLeakedCb::operator()(void *p)
   p = GetUserBegin(p);
   LsanMetadata m(p);
   if (!m.allocated()) return;
-  if (m.tag() != kReachable) {
-    CHECK(m.tag() == kDirectlyLeaked || m.tag() == kIndirectlyLeaked);
+  if (m.tag() == kDirectlyLeaked || m.tag() == kIndirectlyLeaked) {
     Printf("%s leaked %llu byte block at %p\n",
            m.tag() == kDirectlyLeaked ? "Directly" : "Indirectly",
            m.requested_size(), p);
@@ -308,13 +307,19 @@ static void DoLeakCheckCallback(const Su
     PrintLeaked();
   leak_report.PrintSummary();
   Printf("\n");
-  ForEachChunk(ClearTagCb());
   *result = kLeaksFound;
 }
 
 void DoLeakCheck() {
+  static bool already_done;
   LeakCheckResult result = kFatalError;
-  LockAndSuspendThreads(DoLeakCheckCallback, &result);
+  LockThreadRegistry();
+  LockAllocator();
+  CHECK(!already_done);
+  already_done = true;
+  StopTheWorld(DoLeakCheckCallback, &result);
+  UnlockAllocator();
+  UnlockThreadRegistry();
   if (result == kFatalError) {
     Report("LeakSanitizer has encountered a fatal error.\n");
     Die();
@@ -387,5 +392,6 @@ void LeakReport::PrintSummary() {
   Printf("SUMMARY: LeakSanitizer: %llu byte(s) leaked in %llu allocation(s).\n",
          bytes, allocations);
 }
+
 }  // namespace __lsan
 #endif  // CAN_SANITIZE_LEAKS
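
The precedence rule in ScanRangeForPointers ("reachable beats suppressed
beats leaked") can be summarized as a small standalone model. This
helper is illustrative only and not part of the patch; the enum mirrors
ChunkTag from lsan_common.h:

  enum ChunkTag { kDirectlyLeaked = 0, kIndirectlyLeaked = 1,
                  kReachable = 2, kSuppressed = 3 };

  // Models the decision made when a scan pass carrying scan_tag finds a
  // pointer to a chunk currently tagged cur. The chunk is retagged (and
  // re-enqueued in the frontier) only when the new tag ranks higher.
  ChunkTag Propagate(ChunkTag cur, ChunkTag scan_tag) {
    if (cur == kReachable) return kReachable;  // never downgraded
    if (cur == kSuppressed && scan_tag != kReachable)
      return kSuppressed;                      // only kReachable overrides
    return scan_tag;
  }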

Modified: compiler-rt/trunk/lib/lsan/lsan_common.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/lsan/lsan_common.h?rev=183099&r1=183098&r2=183099&view=diff
==============================================================================
--- compiler-rt/trunk/lib/lsan/lsan_common.h (original)
+++ compiler-rt/trunk/lib/lsan/lsan_common.h Mon Jun  3 06:21:34 2013
@@ -32,7 +32,8 @@ namespace __lsan {
 enum ChunkTag {
   kDirectlyLeaked = 0,  // default
   kIndirectlyLeaked = 1,
-  kReachable = 2
+  kReachable = 2,
+  kSuppressed = 3
 };
 
 struct Flags {
@@ -135,17 +136,21 @@ class CollectLeaksCb {
   LeakReport *leak_report_;
 };
 
-// Resets each chunk's tag to default (kDirectlyLeaked).
-class ClearTagCb {
+// Scans each leaked chunk for pointers to other leaked chunks, and marks each
+// of them as indirectly leaked.
+class MarkIndirectlyLeakedCb {
  public:
   void operator()(void *p) const;
 };
 
-// Scans each leaked chunk for pointers to other leaked chunks, and marks each
-// of them as indirectly leaked.
-class MarkIndirectlyLeakedCb {
+// Finds all chunks marked as kSuppressed and adds their addresses to the frontier.
+class CollectSuppressedCb {
  public:
+  explicit CollectSuppressedCb(InternalVector<uptr> *frontier)
+      : frontier_(frontier) {}
   void operator()(void *p) const;
+ private:
+  InternalVector<uptr> *frontier_;
 };
 
 // The following must be implemented in the parent tool.




