[compiler-rt] r267678 - tsan: split thread into logical and physical state

Dmitry Vyukov via llvm-commits llvm-commits at lists.llvm.org
Wed Apr 27 01:23:03 PDT 2016


Author: dvyukov
Date: Wed Apr 27 03:23:02 2016
New Revision: 267678

URL: http://llvm.org/viewvc/llvm-project?rev=267678&view=rev
Log:
tsan: split thread into logical and physical state

This is a reincarnation of http://reviews.llvm.org/D17648 with the bug fix pointed out by Adhemerval (zatrazz).

Currently ThreadState holds both logical state (required for the race-detection algorithm, user-visible)
and physical state (various caches, most notably the malloc cache). Move the physical state into a new
Processor entity. Besides being the right thing from an abstraction point of view, this solves several
problems:

Cache everything on the P level in Go. Currently we cache on a mix of goroutine and OS thread levels,
which unnecessarily increases memory consumption.

Properly handle free operations in Go. Frees are issued by the GC, which does not have a goroutine context.
As a result we could not do anything more than just clear the shadow; for example, we leaked
sync objects and heap block descriptors.
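
As an illustration (not part of this change), a GC worker in Go mode now only needs a
Processor to report a free. A minimal sketch against the Go-mode entry points declared
in go/test.c below; gc_sweep_object is a hypothetical helper, not a real runtime function:

  /* Go-mode entry points, declarations as in go/test.c. */
  void __tsan_proc_create(void **pproc);
  void __tsan_proc_destroy(void *proc);
  void __tsan_free(void *proc, void *p, unsigned long sz);

  /* Hypothetical GC sweep callback: no goroutine/ThreadState in scope,
     only a Processor. Sync objects and heap block descriptors attached
     to [p, p+sz) are now reclaimed instead of just clearing the shadow. */
  static void gc_sweep_object(void *proc, void *p, unsigned long sz) {
    __tsan_free(proc, p, sz);
  }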

The Processor context also lets us get rid of libc malloc in Go (it now holds the internal allocator cache),
which in turn will let us drop the dependency on libc entirely.

Potentially we can make the Processor per-CPU in C++ mode instead of per-thread, which would
reduce resource consumption.
The distinction between Thread and Processor is currently exploited only by Go; in C++ mode a
Processor is created per OS thread, which is equivalent to the current scheme.
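
For C++ mode the lifecycle is one Processor per OS thread, wired for the thread's whole
lifetime; this is what the tsan_interceptors.cc hunks below do. A rough sketch, assuming
tsan_rtl.h is included inside namespace __tsan; ExampleThreadLifetime is a hypothetical
helper (the real code lives in __tsan_thread_start_func and DestroyThreadState):

  // C++ mode: one Processor per OS thread.
  void ExampleThreadLifetime(ThreadState *thr, int tid, uptr os_id) {
    Processor *proc = ProcCreate();  // physical state: allocator caches, dd_pt, ...
    ProcWire(proc, thr);             // thr->proc is set; thr may now handle events
    ThreadStart(thr, tid, os_id);
    // ... run the thread; malloc/sync/clock caches are reached via thr->proc ...
    ThreadFinish(thr);
    ProcUnwire(proc, thr);
    ProcDestroy(proc);
  }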


Modified:
    compiler-rt/trunk/lib/tsan/CMakeLists.txt
    compiler-rt/trunk/lib/tsan/go/build.bat
    compiler-rt/trunk/lib/tsan/go/buildgo.sh
    compiler-rt/trunk/lib/tsan/go/test.c
    compiler-rt/trunk/lib/tsan/go/tsan_go.cc
    compiler-rt/trunk/lib/tsan/rtl/tsan_defs.h
    compiler-rt/trunk/lib/tsan/rtl/tsan_interceptors.cc
    compiler-rt/trunk/lib/tsan/rtl/tsan_interface_java.cc
    compiler-rt/trunk/lib/tsan/rtl/tsan_mman.cc
    compiler-rt/trunk/lib/tsan/rtl/tsan_mman.h
    compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.cc
    compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.h
    compiler-rt/trunk/lib/tsan/rtl/tsan_rtl_mutex.cc
    compiler-rt/trunk/lib/tsan/rtl/tsan_rtl_thread.cc
    compiler-rt/trunk/lib/tsan/rtl/tsan_sync.cc
    compiler-rt/trunk/lib/tsan/rtl/tsan_sync.h
    compiler-rt/trunk/lib/tsan/tests/unit/tsan_sync_test.cc

Modified: compiler-rt/trunk/lib/tsan/CMakeLists.txt
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/CMakeLists.txt?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/CMakeLists.txt (original)
+++ compiler-rt/trunk/lib/tsan/CMakeLists.txt Wed Apr 27 03:23:02 2016
@@ -42,6 +42,7 @@ set(TSAN_SOURCES
   rtl/tsan_rtl.cc
   rtl/tsan_rtl_mutex.cc
   rtl/tsan_rtl_report.cc
+  rtl/tsan_rtl_proc.cc
   rtl/tsan_rtl_thread.cc
   rtl/tsan_stack_trace.cc
   rtl/tsan_stat.cc

Modified: compiler-rt/trunk/lib/tsan/go/build.bat
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/go/build.bat?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/go/build.bat (original)
+++ compiler-rt/trunk/lib/tsan/go/build.bat Wed Apr 27 03:23:02 2016
@@ -1,4 +1,4 @@
-type tsan_go.cc ..\rtl\tsan_interface_atomic.cc ..\rtl\tsan_clock.cc ..\rtl\tsan_flags.cc ..\rtl\tsan_md5.cc ..\rtl\tsan_mutex.cc ..\rtl\tsan_report.cc ..\rtl\tsan_rtl.cc ..\rtl\tsan_rtl_mutex.cc ..\rtl\tsan_rtl_report.cc ..\rtl\tsan_rtl_thread.cc ..\rtl\tsan_stat.cc ..\rtl\tsan_suppressions.cc ..\rtl\tsan_sync.cc ..\rtl\tsan_stack_trace.cc ..\..\sanitizer_common\sanitizer_allocator.cc ..\..\sanitizer_common\sanitizer_common.cc ..\..\sanitizer_common\sanitizer_flags.cc ..\..\sanitizer_common\sanitizer_stacktrace.cc ..\..\sanitizer_common\sanitizer_libc.cc ..\..\sanitizer_common\sanitizer_printf.cc ..\..\sanitizer_common\sanitizer_suppressions.cc ..\..\sanitizer_common\sanitizer_thread_registry.cc ..\rtl\tsan_platform_windows.cc ..\..\sanitizer_common\sanitizer_win.cc ..\..\sanitizer_common\sanitizer_deadlock_detector1.cc ..\..\sanitizer_common\sanitizer_stackdepot.cc ..\..\sanitizer_common\sanitizer_persistent_allocator.cc ..\..\sanitizer_common\sanitizer_flag_parser.cc ..\..\sanitizer_common\sanitizer_symbolizer.cc > gotsan.cc
+type tsan_go.cc ..\rtl\tsan_interface_atomic.cc ..\rtl\tsan_clock.cc ..\rtl\tsan_flags.cc ..\rtl\tsan_md5.cc ..\rtl\tsan_mutex.cc ..\rtl\tsan_report.cc ..\rtl\tsan_rtl.cc ..\rtl\tsan_rtl_mutex.cc ..\rtl\tsan_rtl_report.cc ..\rtl\tsan_rtl_thread.cc ..\rtl\tsan_rtl_proc.cc ..\rtl\tsan_stat.cc ..\rtl\tsan_suppressions.cc ..\rtl\tsan_sync.cc ..\rtl\tsan_stack_trace.cc ..\..\sanitizer_common\sanitizer_allocator.cc ..\..\sanitizer_common\sanitizer_common.cc ..\..\sanitizer_common\sanitizer_flags.cc ..\..\sanitizer_common\sanitizer_stacktrace.cc ..\..\sanitizer_common\sanitizer_libc.cc ..\..\sanitizer_common\sanitizer_printf.cc ..\..\sanitizer_common\sanitizer_suppressions.cc ..\..\sanitizer_common\sanitizer_thread_registry.cc ..\rtl\tsan_platform_windows.cc ..\..\sanitizer_common\sanitizer_win.cc ..\..\sanitizer_common\sanitizer_deadlock_detector1.cc ..\..\sanitizer_common\sanitizer_stackdepot.cc ..\..\sanitizer_common\sanitizer_persistent_allocator.cc ..\..\sanitizer_common\sanitizer_flag_parser.cc ..\..\sanitizer_common\sanitizer_symbolizer.cc > gotsan.cc
 
 gcc -c -o race_windows_amd64.syso gotsan.cc -I..\rtl -I..\.. -I..\..\sanitizer_common -I..\..\..\include -m64 -Wall -fno-exceptions -fno-rtti -DSANITIZER_GO -Wno-error=attributes -Wno-attributes -Wno-format -Wno-maybe-uninitialized -DSANITIZER_DEBUG=0 -O3 -fomit-frame-pointer -std=c++11
 

Modified: compiler-rt/trunk/lib/tsan/go/buildgo.sh
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/go/buildgo.sh?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/go/buildgo.sh (original)
+++ compiler-rt/trunk/lib/tsan/go/buildgo.sh Wed Apr 27 03:23:02 2016
@@ -14,6 +14,7 @@ SRCS="
 	../rtl/tsan_rtl_mutex.cc
 	../rtl/tsan_rtl_report.cc
 	../rtl/tsan_rtl_thread.cc
+	../rtl/tsan_rtl_proc.cc
 	../rtl/tsan_stack_trace.cc
 	../rtl/tsan_stat.cc
 	../rtl/tsan_suppressions.cc
@@ -123,7 +124,7 @@ if [ "$SILENT" != "1" ]; then
 fi
 $CC $DIR/gotsan.cc -c -o $DIR/race_$SUFFIX.syso $FLAGS $CFLAGS
 
-$CC $OSCFLAGS test.c $DIR/race_$SUFFIX.syso -m64 -o $DIR/test $OSLDFLAGS
+$CC $OSCFLAGS test.c $DIR/race_$SUFFIX.syso -m64 -g -o $DIR/test $OSLDFLAGS
 
 export GORACE="exitcode=0 atexit_sleep_ms=0"
 if [ "$SILENT" != "1" ]; then

Modified: compiler-rt/trunk/lib/tsan/go/test.c
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/go/test.c?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/go/test.c (original)
+++ compiler-rt/trunk/lib/tsan/go/test.c Wed Apr 27 03:23:02 2016
@@ -13,17 +13,21 @@
 
 #include <stdio.h>
 
-void __tsan_init(void **thr, void (*cb)(long, void*));
+void __tsan_init(void **thr, void **proc, void (*cb)(long, void*));
 void __tsan_fini();
 void __tsan_map_shadow(void *addr, unsigned long size);
 void __tsan_go_start(void *thr, void **chthr, void *pc);
 void __tsan_go_end(void *thr);
+void __tsan_proc_create(void **pproc);
+void __tsan_proc_destroy(void *proc);
+void __tsan_proc_wire(void *proc, void *thr);
+void __tsan_proc_unwire(void *proc, void *thr);
 void __tsan_read(void *thr, void *addr, void *pc);
 void __tsan_write(void *thr, void *addr, void *pc);
 void __tsan_func_enter(void *thr, void *pc);
 void __tsan_func_exit(void *thr);
 void __tsan_malloc(void *thr, void *pc, void *p, unsigned long sz);
-void __tsan_free(void *thr, void *p, unsigned long sz);
+void __tsan_free(void *proc, void *p, unsigned long sz);
 void __tsan_acquire(void *thr, void *addr);
 void __tsan_release(void *thr, void *addr);
 void __tsan_release_merge(void *thr, void *addr);
@@ -37,10 +41,12 @@ void barfoo() {}
 
 int main(void) {
   void *thr0 = 0;
-  __tsan_init(&thr0, symbolize_cb);
+  void *proc0 = 0;
+  __tsan_init(&thr0, &proc0, symbolize_cb);
   char *buf = (char*)((unsigned long)buf0 + (64<<10) - 1 & ~((64<<10) - 1));
   __tsan_map_shadow(buf, 4096);
   __tsan_malloc(thr0, (char*)&barfoo + 1, buf, 10);
+  __tsan_free(proc0, buf, 10);
   __tsan_free(thr0, buf, 10);
   __tsan_func_enter(thr0, (char*)&main + 1);
   __tsan_malloc(thr0, (char*)&barfoo + 1, buf, 10);
@@ -50,6 +56,9 @@ int main(void) {
   __tsan_go_start(thr0, &thr1, (char*)&barfoo + 1);
   void *thr2 = 0;
   __tsan_go_start(thr0, &thr2, (char*)&barfoo + 1);
+  __tsan_func_exit(thr0);
+  __tsan_proc_unwire(proc0, thr0);
+  __tsan_proc_wire(proc0, thr1);
   __tsan_func_enter(thr1, (char*)&foobar + 1);
   __tsan_func_enter(thr1, (char*)&foobar + 1);
   __tsan_write(thr1, buf, (char*)&barfoo + 1);
@@ -57,11 +66,16 @@ int main(void) {
   __tsan_func_exit(thr1);
   __tsan_func_exit(thr1);
   __tsan_go_end(thr1);
+  void *proc1 = 0;
+  __tsan_proc_create(&proc1);
+  __tsan_proc_wire(proc1, thr2);
   __tsan_func_enter(thr2, (char*)&foobar + 1);
   __tsan_read(thr2, buf, (char*)&barfoo + 1);
+  __tsan_free(proc1, buf, 10);
   __tsan_func_exit(thr2);
   __tsan_go_end(thr2);
-  __tsan_func_exit(thr0);
+  __tsan_proc_destroy(proc0);
+  __tsan_proc_destroy(proc1);
   __tsan_fini();
   return 0;
 }

Modified: compiler-rt/trunk/lib/tsan/go/tsan_go.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/go/tsan_go.cc?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/go/tsan_go.cc (original)
+++ compiler-rt/trunk/lib/tsan/go/tsan_go.cc Wed Apr 27 03:23:02 2016
@@ -121,11 +121,13 @@ static ThreadState *AllocGoroutine() {
   return thr;
 }
 
-void __tsan_init(ThreadState **thrp, void (*cb)(uptr cmd, void *cb)) {
+void __tsan_init(ThreadState **thrp, Processor **procp,
+                 void (*cb)(uptr cmd, void *cb)) {
   go_runtime_cb = cb;
   ThreadState *thr = AllocGoroutine();
   main_thr = *thrp = thr;
   Initialize(thr);
+  *procp = thr->proc;
   inited = true;
 }
 
@@ -187,23 +189,46 @@ void __tsan_malloc(ThreadState *thr, upt
   MemoryResetRange(0, 0, (uptr)p, sz);
 }
 
-void __tsan_free(ThreadState *thr, uptr p, uptr sz) {
-  if (thr)
-    ctx->metamap.FreeRange(thr, 0, p, sz);
+void __tsan_free(Processor *proc, uptr p, uptr sz) {
+  ctx->metamap.FreeRange(proc, p, sz);
 }
 
 void __tsan_go_start(ThreadState *parent, ThreadState **pthr, void *pc) {
   ThreadState *thr = AllocGoroutine();
   *pthr = thr;
   int goid = ThreadCreate(parent, (uptr)pc, 0, true);
+  Processor *proc = parent->proc;
+  // Temporary borrow proc to handle goroutine start.
+  ProcUnwire(proc, parent);
+  ProcWire(proc, thr);
   ThreadStart(thr, goid, 0);
+  ProcUnwire(proc, thr);
+  ProcWire(proc, parent);
 }
 
 void __tsan_go_end(ThreadState *thr) {
+  Processor *proc = thr->proc;
   ThreadFinish(thr);
+  ProcUnwire(proc, thr);
   internal_free(thr);
 }
 
+void __tsan_proc_create(Processor **pproc) {
+  *pproc = ProcCreate();
+}
+
+void __tsan_proc_destroy(Processor *proc) {
+  ProcDestroy(proc);
+}
+
+void __tsan_proc_wire(Processor *proc, ThreadState *thr) {
+  ProcWire(proc, thr);
+}
+
+void __tsan_proc_unwire(Processor *proc, ThreadState *thr) {
+  ProcUnwire(proc, thr);
+}
+
 void __tsan_acquire(ThreadState *thr, void *addr) {
   Acquire(thr, 0, (uptr)addr);
 }

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_defs.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_defs.h?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_defs.h (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_defs.h Wed Apr 27 03:23:02 2016
@@ -149,6 +149,7 @@ struct MD5Hash {
 
 MD5Hash md5_hash(const void *data, uptr size);
 
+struct Processor;
 struct ThreadState;
 class ThreadContext;
 struct Context;

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_interceptors.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_interceptors.cc?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_interceptors.cc (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_interceptors.cc Wed Apr 27 03:23:02 2016
@@ -740,7 +740,7 @@ TSAN_INTERCEPTOR(int, munmap, void *addr
   if (sz != 0) {
     // If sz == 0, munmap will return EINVAL and don't unmap any memory.
     DontNeedShadowFor((uptr)addr, sz);
-    ctx->metamap.ResetRange(thr, pc, (uptr)addr, (uptr)sz);
+    ctx->metamap.ResetRange(thr->proc, (uptr)addr, (uptr)sz);
   }
   int res = REAL(munmap)(addr, sz);
   return res;
@@ -835,7 +835,10 @@ STDCXX_INTERCEPTOR(void, __cxa_guard_abo
 namespace __tsan {
 void DestroyThreadState() {
   ThreadState *thr = cur_thread();
+  Processor *proc = thr->proc;
   ThreadFinish(thr);
+  ProcUnwire(proc, thr);
+  ProcDestroy(proc);
   ThreadSignalContext *sctx = thr->signal_ctx;
   if (sctx) {
     thr->signal_ctx = 0;
@@ -886,6 +889,8 @@ extern "C" void *__tsan_thread_start_fun
 #endif
     while ((tid = atomic_load(&p->tid, memory_order_acquire)) == 0)
       internal_sched_yield();
+    Processor *proc = ProcCreate();
+    ProcWire(proc, thr);
     ThreadStart(thr, tid, GetTid());
     atomic_store(&p->tid, 0, memory_order_release);
   }

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_interface_java.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_interface_java.cc?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_interface_java.cc (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_interface_java.cc Wed Apr 27 03:23:02 2016
@@ -111,7 +111,7 @@ void __tsan_java_free(jptr ptr, jptr siz
   CHECK_GE(ptr, jctx->heap_begin);
   CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);
 
-  ctx->metamap.FreeRange(thr, pc, ptr, size);
+  ctx->metamap.FreeRange(thr->proc, ptr, size);
 }
 
 void __tsan_java_move(jptr src, jptr dst, jptr size) {

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_mman.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_mman.cc?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_mman.cc (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_mman.cc Wed Apr 27 03:23:02 2016
@@ -67,14 +67,14 @@ void InitializeAllocator() {
   allocator()->Init(common_flags()->allocator_may_return_null);
 }
 
-void AllocatorThreadStart(ThreadState *thr) {
-  allocator()->InitCache(&thr->alloc_cache);
-  internal_allocator()->InitCache(&thr->internal_alloc_cache);
+void AllocatorProcStart(Processor *proc) {
+  allocator()->InitCache(&proc->alloc_cache);
+  internal_allocator()->InitCache(&proc->internal_alloc_cache);
 }
 
-void AllocatorThreadFinish(ThreadState *thr) {
-  allocator()->DestroyCache(&thr->alloc_cache);
-  internal_allocator()->DestroyCache(&thr->internal_alloc_cache);
+void AllocatorProcFinish(Processor *proc) {
+  allocator()->DestroyCache(&proc->alloc_cache);
+  internal_allocator()->DestroyCache(&proc->internal_alloc_cache);
 }
 
 void AllocatorPrintStats() {
@@ -98,7 +98,7 @@ static void SignalUnsafeCall(ThreadState
 void *user_alloc(ThreadState *thr, uptr pc, uptr sz, uptr align, bool signal) {
   if ((sz >= (1ull << 40)) || (align >= (1ull << 40)))
     return allocator()->ReturnNullOrDie();
-  void *p = allocator()->Allocate(&thr->alloc_cache, sz, align);
+  void *p = allocator()->Allocate(&thr->proc->alloc_cache, sz, align);
   if (p == 0)
     return 0;
   if (ctx && ctx->initialized)
@@ -120,7 +120,7 @@ void *user_calloc(ThreadState *thr, uptr
 void user_free(ThreadState *thr, uptr pc, void *p, bool signal) {
   if (ctx && ctx->initialized)
     OnUserFree(thr, pc, (uptr)p, true);
-  allocator()->Deallocate(&thr->alloc_cache, p);
+  allocator()->Deallocate(&thr->proc->alloc_cache, p);
   if (signal)
     SignalUnsafeCall(thr, pc);
 }
@@ -136,7 +136,7 @@ void OnUserAlloc(ThreadState *thr, uptr
 
 void OnUserFree(ThreadState *thr, uptr pc, uptr p, bool write) {
   CHECK_NE(p, (void*)0);
-  uptr sz = ctx->metamap.FreeBlock(thr, pc, p);
+  uptr sz = ctx->metamap.FreeBlock(thr->proc, p);
   DPrintf("#%d: free(%p, %zu)\n", thr->tid, p, sz);
   if (write && thr->ignore_reads_and_writes == 0)
     MemoryRangeFreed(thr, pc, (uptr)p, sz);
@@ -187,7 +187,7 @@ void *internal_alloc(MBlockType typ, upt
     thr->nomalloc = 0;  // CHECK calls internal_malloc().
     CHECK(0);
   }
-  return InternalAlloc(sz, &thr->internal_alloc_cache);
+  return InternalAlloc(sz, &thr->proc->internal_alloc_cache);
 }
 
 void internal_free(void *p) {
@@ -196,7 +196,7 @@ void internal_free(void *p) {
     thr->nomalloc = 0;  // CHECK calls internal_malloc().
     CHECK(0);
   }
-  InternalFree(p, &thr->internal_alloc_cache);
+  InternalFree(p, &thr->proc->internal_alloc_cache);
 }
 
 }  // namespace __tsan
@@ -238,8 +238,8 @@ uptr __sanitizer_get_allocated_size(cons
 
 void __tsan_on_thread_idle() {
   ThreadState *thr = cur_thread();
-  allocator()->SwallowCache(&thr->alloc_cache);
-  internal_allocator()->SwallowCache(&thr->internal_alloc_cache);
-  ctx->metamap.OnThreadIdle(thr);
+  allocator()->SwallowCache(&thr->proc->alloc_cache);
+  internal_allocator()->SwallowCache(&thr->proc->internal_alloc_cache);
+  ctx->metamap.OnProcIdle(thr->proc);
 }
 }  // extern "C"

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_mman.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_mman.h?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_mman.h (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_mman.h Wed Apr 27 03:23:02 2016
@@ -21,8 +21,8 @@ const uptr kDefaultAlignment = 16;
 
 void InitializeAllocator();
 void ReplaceSystemMalloc();
-void AllocatorThreadStart(ThreadState *thr);
-void AllocatorThreadFinish(ThreadState *thr);
+void AllocatorProcStart(Processor *proc);
+void AllocatorProcFinish(Processor *proc);
 void AllocatorPrintStats();
 
 // For user allocations.

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.cc?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.cc (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.cc Wed Apr 27 03:23:02 2016
@@ -329,6 +329,10 @@ void Initialize(ThreadState *thr) {
   InitializeAllocator();
   ReplaceSystemMalloc();
 #endif
+  if (common_flags()->detect_deadlocks)
+    ctx->dd = DDetector::Create(flags());
+  Processor *proc = ProcCreate();
+  ProcWire(proc, thr);
   InitializeInterceptors();
   CheckShadowMapping();
   InitializePlatform();
@@ -351,8 +355,6 @@ void Initialize(ThreadState *thr) {
   SetSandboxingCallback(StopBackgroundThread);
 #endif
 #endif
-  if (common_flags()->detect_deadlocks)
-    ctx->dd = DDetector::Create(flags());
 
   VPrintf(1, "***** Running under ThreadSanitizer v2 (pid %d) *****\n",
           (int)internal_getpid());

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.h?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.h (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.h Wed Apr 27 03:23:02 2016
@@ -325,6 +325,26 @@ struct JmpBuf {
   uptr *shadow_stack_pos;
 };
 
+// A Processor represents a physical thread, or a P for Go.
+// It is used to store internal resources like allocate cache, and does not
+// participate in race-detection logic (invisible to end user).
+// In C++ it is tied to an OS thread just like ThreadState, however ideally
+// it should be tied to a CPU (this way we will have fewer allocator caches).
+// In Go it is tied to a P, so there are significantly fewer Processor's than
+// ThreadState's (which are tied to Gs).
+// A ThreadState must be wired with a Processor to handle events.
+struct Processor {
+  ThreadState *thr; // currently wired thread, or nullptr
+#ifndef SANITIZER_GO
+  AllocatorCache alloc_cache;
+  InternalAllocatorCache internal_alloc_cache;
+#endif
+  DenseSlabAllocCache block_cache;
+  DenseSlabAllocCache sync_cache;
+  DenseSlabAllocCache clock_cache;
+  DDPhysicalThread *dd_pt;
+};
+
 // This struct is stored in TLS.
 struct ThreadState {
   FastState fast_state;
@@ -360,8 +380,6 @@ struct ThreadState {
   MutexSet mset;
   ThreadClock clock;
 #ifndef SANITIZER_GO
-  AllocatorCache alloc_cache;
-  InternalAllocatorCache internal_alloc_cache;
   Vector<JmpBuf> jmp_bufs;
   int ignore_interceptors;
 #endif
@@ -385,16 +403,14 @@ struct ThreadState {
 #if SANITIZER_DEBUG && !SANITIZER_GO
   InternalDeadlockDetector internal_deadlock_detector;
 #endif
-  DDPhysicalThread *dd_pt;
   DDLogicalThread *dd_lt;
 
+  // Current wired Processor, or nullptr. Required to handle any events.
+  Processor *proc;
+
   atomic_uintptr_t in_signal_handler;
   ThreadSignalContext *signal_ctx;
 
-  DenseSlabAllocCache block_cache;
-  DenseSlabAllocCache sync_cache;
-  DenseSlabAllocCache clock_cache;
-
 #ifndef SANITIZER_GO
   u32 last_sleep_stack_id;
   ThreadClock last_sleep_clock;
@@ -685,6 +701,11 @@ void ThreadSetName(ThreadState *thr, con
 int ThreadCount(ThreadState *thr);
 void ProcessPendingSignals(ThreadState *thr);
 
+Processor *ProcCreate();
+void ProcDestroy(Processor *proc);
+void ProcWire(Processor *proc, ThreadState *thr);
+void ProcUnwire(Processor *proc, ThreadState *thr);
+
 void MutexCreate(ThreadState *thr, uptr pc, uptr addr,
                  bool rw, bool recursive, bool linker_init);
 void MutexDestroy(ThreadState *thr, uptr pc, uptr addr);

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_rtl_mutex.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_rtl_mutex.cc?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_rtl_mutex.cc (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_rtl_mutex.cc Wed Apr 27 03:23:02 2016
@@ -32,7 +32,7 @@ struct Callback : DDCallback {
   Callback(ThreadState *thr, uptr pc)
       : thr(thr)
       , pc(pc) {
-    DDCallback::pt = thr->dd_pt;
+    DDCallback::pt = thr->proc->dd_pt;
     DDCallback::lt = thr->dd_lt;
   }
 
@@ -114,7 +114,7 @@ void MutexDestroy(ThreadState *thr, uptr
   u64 mid = s->GetId();
   u32 last_lock = s->last_lock;
   if (!unlock_locked)
-    s->Reset(thr);  // must not reset it before the report is printed
+    s->Reset(thr->proc);  // must not reset it before the report is printed
   s->mtx.Unlock();
   if (unlock_locked) {
     ThreadRegistryLock l(ctx->thread_registry);
@@ -132,7 +132,7 @@ void MutexDestroy(ThreadState *thr, uptr
   if (unlock_locked) {
     SyncVar *s = ctx->metamap.GetIfExistsAndLock(addr);
     if (s != 0) {
-      s->Reset(thr);
+      s->Reset(thr->proc);
       s->mtx.Unlock();
     }
   }
@@ -434,7 +434,7 @@ void AcquireImpl(ThreadState *thr, uptr
   if (thr->ignore_sync)
     return;
   thr->clock.set(thr->fast_state.epoch());
-  thr->clock.acquire(&thr->clock_cache, c);
+  thr->clock.acquire(&thr->proc->clock_cache, c);
   StatInc(thr, StatSyncAcquire);
 }
 
@@ -443,7 +443,7 @@ void ReleaseImpl(ThreadState *thr, uptr
     return;
   thr->clock.set(thr->fast_state.epoch());
   thr->fast_synch_epoch = thr->fast_state.epoch();
-  thr->clock.release(&thr->clock_cache, c);
+  thr->clock.release(&thr->proc->clock_cache, c);
   StatInc(thr, StatSyncRelease);
 }
 
@@ -452,7 +452,7 @@ void ReleaseStoreImpl(ThreadState *thr,
     return;
   thr->clock.set(thr->fast_state.epoch());
   thr->fast_synch_epoch = thr->fast_state.epoch();
-  thr->clock.ReleaseStore(&thr->clock_cache, c);
+  thr->clock.ReleaseStore(&thr->proc->clock_cache, c);
   StatInc(thr, StatSyncRelease);
 }
 
@@ -461,7 +461,7 @@ void AcquireReleaseImpl(ThreadState *thr
     return;
   thr->clock.set(thr->fast_state.epoch());
   thr->fast_synch_epoch = thr->fast_state.epoch();
-  thr->clock.acq_rel(&thr->clock_cache, c);
+  thr->clock.acq_rel(&thr->proc->clock_cache, c);
   StatInc(thr, StatSyncAcquire);
   StatInc(thr, StatSyncRelease);
 }

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_rtl_thread.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_rtl_thread.cc?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_rtl_thread.cc (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_rtl_thread.cc Wed Apr 27 03:23:02 2016
@@ -42,7 +42,7 @@ void ThreadContext::OnDead() {
 void ThreadContext::OnJoined(void *arg) {
   ThreadState *caller_thr = static_cast<ThreadState *>(arg);
   AcquireImpl(caller_thr, 0, &sync);
-  sync.Reset(&caller_thr->clock_cache);
+  sync.Reset(&caller_thr->proc->clock_cache);
 }
 
 struct OnCreatedArgs {
@@ -74,7 +74,7 @@ void ThreadContext::OnReset() {
 
 void ThreadContext::OnDetached(void *arg) {
   ThreadState *thr1 = static_cast<ThreadState*>(arg);
-  sync.Reset(&thr1->clock_cache);
+  sync.Reset(&thr1->proc->clock_cache);
 }
 
 struct OnStartedArgs {
@@ -106,13 +106,8 @@ void ThreadContext::OnStarted(void *arg)
   thr->shadow_stack_pos = thr->shadow_stack;
   thr->shadow_stack_end = thr->shadow_stack + kInitStackSize;
 #endif
-#ifndef SANITIZER_GO
-  AllocatorThreadStart(thr);
-#endif
-  if (common_flags()->detect_deadlocks) {
-    thr->dd_pt = ctx->dd->CreatePhysicalThread();
+  if (common_flags()->detect_deadlocks)
     thr->dd_lt = ctx->dd->CreateLogicalThread(unique_id);
-  }
   thr->fast_state.SetHistorySize(flags()->history_size);
   // Commit switch to the new part of the trace.
   // TraceAddEvent will reset stack0/mset0 in the new part for us.
@@ -121,7 +116,7 @@ void ThreadContext::OnStarted(void *arg)
   thr->fast_synch_epoch = epoch0;
   AcquireImpl(thr, 0, &sync);
   StatInc(thr, StatSyncAcquire);
-  sync.Reset(&thr->clock_cache);
+  sync.Reset(&thr->proc->clock_cache);
   thr->is_inited = true;
   DPrintf("#%d: ThreadStart epoch=%zu stk_addr=%zx stk_size=%zx "
           "tls_addr=%zx tls_size=%zx\n",
@@ -138,15 +133,8 @@ void ThreadContext::OnFinished() {
   }
   epoch1 = thr->fast_state.epoch();
 
-  if (common_flags()->detect_deadlocks) {
-    ctx->dd->DestroyPhysicalThread(thr->dd_pt);
+  if (common_flags()->detect_deadlocks)
     ctx->dd->DestroyLogicalThread(thr->dd_lt);
-  }
-  ctx->clock_alloc.FlushCache(&thr->clock_cache);
-  ctx->metamap.OnThreadIdle(thr);
-#ifndef SANITIZER_GO
-  AllocatorThreadFinish(thr);
-#endif
   thr->~ThreadState();
 #if TSAN_COLLECT_STATS
   StatAggregate(ctx->stat, thr->stat);

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_sync.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_sync.cc?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_sync.cc (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_sync.cc Wed Apr 27 03:23:02 2016
@@ -36,7 +36,7 @@ void SyncVar::Init(ThreadState *thr, upt
     DDMutexInit(thr, pc, this);
 }
 
-void SyncVar::Reset(ThreadState *thr) {
+void SyncVar::Reset(Processor *proc) {
   uid = 0;
   creation_stack_id = 0;
   owner_tid = kInvalidTid;
@@ -47,12 +47,12 @@ void SyncVar::Reset(ThreadState *thr) {
   is_broken = 0;
   is_linker_init = 0;
 
-  if (thr == 0) {
+  if (proc == 0) {
     CHECK_EQ(clock.size(), 0);
     CHECK_EQ(read_clock.size(), 0);
   } else {
-    clock.Reset(&thr->clock_cache);
-    read_clock.Reset(&thr->clock_cache);
+    clock.Reset(&proc->clock_cache);
+    read_clock.Reset(&proc->clock_cache);
   }
 }
 
@@ -61,7 +61,7 @@ MetaMap::MetaMap() {
 }
 
 void MetaMap::AllocBlock(ThreadState *thr, uptr pc, uptr p, uptr sz) {
-  u32 idx = block_alloc_.Alloc(&thr->block_cache);
+  u32 idx = block_alloc_.Alloc(&thr->proc->block_cache);
   MBlock *b = block_alloc_.Map(idx);
   b->siz = sz;
   b->tid = thr->tid;
@@ -71,16 +71,16 @@ void MetaMap::AllocBlock(ThreadState *th
   *meta = idx | kFlagBlock;
 }
 
-uptr MetaMap::FreeBlock(ThreadState *thr, uptr pc, uptr p) {
+uptr MetaMap::FreeBlock(Processor *proc, uptr p) {
   MBlock* b = GetBlock(p);
   if (b == 0)
     return 0;
   uptr sz = RoundUpTo(b->siz, kMetaShadowCell);
-  FreeRange(thr, pc, p, sz);
+  FreeRange(proc, p, sz);
   return sz;
 }
 
-bool MetaMap::FreeRange(ThreadState *thr, uptr pc, uptr p, uptr sz) {
+bool MetaMap::FreeRange(Processor *proc, uptr p, uptr sz) {
   bool has_something = false;
   u32 *meta = MemToMeta(p);
   u32 *end = MemToMeta(p + sz);
@@ -96,14 +96,14 @@ bool MetaMap::FreeRange(ThreadState *thr
     has_something = true;
     while (idx != 0) {
       if (idx & kFlagBlock) {
-        block_alloc_.Free(&thr->block_cache, idx & ~kFlagMask);
+        block_alloc_.Free(&proc->block_cache, idx & ~kFlagMask);
         break;
       } else if (idx & kFlagSync) {
         DCHECK(idx & kFlagSync);
         SyncVar *s = sync_alloc_.Map(idx & ~kFlagMask);
         u32 next = s->next;
-        s->Reset(thr);
-        sync_alloc_.Free(&thr->sync_cache, idx & ~kFlagMask);
+        s->Reset(proc);
+        sync_alloc_.Free(&proc->sync_cache, idx & ~kFlagMask);
         idx = next;
       } else {
         CHECK(0);
@@ -119,24 +119,24 @@ bool MetaMap::FreeRange(ThreadState *thr
 // which can be huge. The function probes pages one-by-one until it finds a page
 // without meta objects, at this point it stops freeing meta objects. Because
 // thread stacks grow top-down, we do the same starting from end as well.
-void MetaMap::ResetRange(ThreadState *thr, uptr pc, uptr p, uptr sz) {
+void MetaMap::ResetRange(Processor *proc, uptr p, uptr sz) {
   const uptr kMetaRatio = kMetaShadowCell / kMetaShadowSize;
   const uptr kPageSize = GetPageSizeCached() * kMetaRatio;
   if (sz <= 4 * kPageSize) {
     // If the range is small, just do the normal free procedure.
-    FreeRange(thr, pc, p, sz);
+    FreeRange(proc, p, sz);
     return;
   }
   // First, round both ends of the range to page size.
   uptr diff = RoundUp(p, kPageSize) - p;
   if (diff != 0) {
-    FreeRange(thr, pc, p, diff);
+    FreeRange(proc, p, diff);
     p += diff;
     sz -= diff;
   }
   diff = p + sz - RoundDown(p + sz, kPageSize);
   if (diff != 0) {
-    FreeRange(thr, pc, p + sz - diff, diff);
+    FreeRange(proc, p + sz - diff, diff);
     sz -= diff;
   }
   // Now we must have a non-empty page-aligned range.
@@ -147,7 +147,7 @@ void MetaMap::ResetRange(ThreadState *th
   const uptr sz0 = sz;
   // Probe start of the range.
   while (sz > 0) {
-    bool has_something = FreeRange(thr, pc, p, kPageSize);
+    bool has_something = FreeRange(proc, p, kPageSize);
     p += kPageSize;
     sz -= kPageSize;
     if (!has_something)
@@ -155,7 +155,7 @@ void MetaMap::ResetRange(ThreadState *th
   }
   // Probe end of the range.
   while (sz > 0) {
-    bool has_something = FreeRange(thr, pc, p - kPageSize, kPageSize);
+    bool has_something = FreeRange(proc, p - kPageSize, kPageSize);
     sz -= kPageSize;
     if (!has_something)
       break;
@@ -210,8 +210,8 @@ SyncVar* MetaMap::GetAndLock(ThreadState
       SyncVar * s = sync_alloc_.Map(idx & ~kFlagMask);
       if (s->addr == addr) {
         if (myidx != 0) {
-          mys->Reset(thr);
-          sync_alloc_.Free(&thr->sync_cache, myidx);
+          mys->Reset(thr->proc);
+          sync_alloc_.Free(&thr->proc->sync_cache, myidx);
         }
         if (write_lock)
           s->mtx.Lock();
@@ -230,7 +230,7 @@ SyncVar* MetaMap::GetAndLock(ThreadState
 
     if (myidx == 0) {
       const u64 uid = atomic_fetch_add(&uid_gen_, 1, memory_order_relaxed);
-      myidx = sync_alloc_.Alloc(&thr->sync_cache);
+      myidx = sync_alloc_.Alloc(&thr->proc->sync_cache);
       mys = sync_alloc_.Map(myidx);
       mys->Init(thr, pc, addr, uid);
     }
@@ -279,9 +279,9 @@ void MetaMap::MoveMemory(uptr src, uptr
   }
 }
 
-void MetaMap::OnThreadIdle(ThreadState *thr) {
-  block_alloc_.FlushCache(&thr->block_cache);
-  sync_alloc_.FlushCache(&thr->sync_cache);
+void MetaMap::OnProcIdle(Processor *proc) {
+  block_alloc_.FlushCache(&proc->block_cache);
+  sync_alloc_.FlushCache(&proc->sync_cache);
 }
 
 }  // namespace __tsan

Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_sync.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_sync.h?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_sync.h (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_sync.h Wed Apr 27 03:23:02 2016
@@ -47,7 +47,7 @@ struct SyncVar {
   SyncClock clock;
 
   void Init(ThreadState *thr, uptr pc, uptr addr, u64 uid);
-  void Reset(ThreadState *thr);
+  void Reset(Processor *proc);
 
   u64 GetId() const {
     // 47 lsb is addr, then 14 bits is low part of uid, then 3 zero bits.
@@ -72,9 +72,9 @@ class MetaMap {
   MetaMap();
 
   void AllocBlock(ThreadState *thr, uptr pc, uptr p, uptr sz);
-  uptr FreeBlock(ThreadState *thr, uptr pc, uptr p);
-  bool FreeRange(ThreadState *thr, uptr pc, uptr p, uptr sz);
-  void ResetRange(ThreadState *thr, uptr pc, uptr p, uptr sz);
+  uptr FreeBlock(Processor *proc, uptr p);
+  bool FreeRange(Processor *proc, uptr p, uptr sz);
+  void ResetRange(Processor *proc, uptr p, uptr sz);
   MBlock* GetBlock(uptr p);
 
   SyncVar* GetOrCreateAndLock(ThreadState *thr, uptr pc,
@@ -83,7 +83,7 @@ class MetaMap {
 
   void MoveMemory(uptr src, uptr dst, uptr sz);
 
-  void OnThreadIdle(ThreadState *thr);
+  void OnProcIdle(Processor *proc);
 
  private:
   static const u32 kFlagMask  = 3u << 30;

Modified: compiler-rt/trunk/lib/tsan/tests/unit/tsan_sync_test.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/tests/unit/tsan_sync_test.cc?rev=267678&r1=267677&r2=267678&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/tests/unit/tsan_sync_test.cc (original)
+++ compiler-rt/trunk/lib/tsan/tests/unit/tsan_sync_test.cc Wed Apr 27 03:23:02 2016
@@ -25,7 +25,7 @@ TEST(MetaMap, Basic) {
   EXPECT_NE(mb, (MBlock*)0);
   EXPECT_EQ(mb->siz, 1 * sizeof(u64));
   EXPECT_EQ(mb->tid, thr->tid);
-  uptr sz = m->FreeBlock(thr, 0, (uptr)&block[0]);
+  uptr sz = m->FreeBlock(thr->proc, (uptr)&block[0]);
   EXPECT_EQ(sz, 1 * sizeof(u64));
   mb = m->GetBlock((uptr)&block[0]);
   EXPECT_EQ(mb, (MBlock*)0);
@@ -41,7 +41,7 @@ TEST(MetaMap, FreeRange) {
   EXPECT_EQ(mb1->siz, 1 * sizeof(u64));
   MBlock *mb2 = m->GetBlock((uptr)&block[1]);
   EXPECT_EQ(mb2->siz, 3 * sizeof(u64));
-  m->FreeRange(thr, 0, (uptr)&block[0], 4 * sizeof(u64));
+  m->FreeRange(thr->proc, (uptr)&block[0], 4 * sizeof(u64));
   mb1 = m->GetBlock((uptr)&block[0]);
   EXPECT_EQ(mb1, (MBlock*)0);
   mb2 = m->GetBlock((uptr)&block[1]);
@@ -63,12 +63,12 @@ TEST(MetaMap, Sync) {
   EXPECT_NE(s2, (SyncVar*)0);
   EXPECT_EQ(s2->addr, (uptr)&block[1]);
   s2->mtx.ReadUnlock();
-  m->FreeBlock(thr, 0, (uptr)&block[0]);
+  m->FreeBlock(thr->proc, (uptr)&block[0]);
   s1 = m->GetIfExistsAndLock((uptr)&block[0]);
   EXPECT_EQ(s1, (SyncVar*)0);
   s2 = m->GetIfExistsAndLock((uptr)&block[1]);
   EXPECT_EQ(s2, (SyncVar*)0);
-  m->OnThreadIdle(thr);
+  m->OnProcIdle(thr->proc);
 }
 
 TEST(MetaMap, MoveMemory) {
@@ -105,7 +105,7 @@ TEST(MetaMap, MoveMemory) {
   EXPECT_NE(s2, (SyncVar*)0);
   EXPECT_EQ(s2->addr, (uptr)&block2[1]);
   s2->mtx.Unlock();
-  m->FreeRange(thr, 0, (uptr)&block2[0], 4 * sizeof(u64));
+  m->FreeRange(thr->proc, (uptr)&block2[0], 4 * sizeof(u64));
 }
 
 TEST(MetaMap, ResetSync) {
@@ -114,9 +114,9 @@ TEST(MetaMap, ResetSync) {
   u64 block[1] = {};  // fake malloc block
   m->AllocBlock(thr, 0, (uptr)&block[0], 1 * sizeof(u64));
   SyncVar *s = m->GetOrCreateAndLock(thr, 0, (uptr)&block[0], true);
-  s->Reset(thr);
+  s->Reset(thr->proc);
   s->mtx.Unlock();
-  uptr sz = m->FreeBlock(thr, 0, (uptr)&block[0]);
+  uptr sz = m->FreeBlock(thr->proc, (uptr)&block[0]);
   EXPECT_EQ(sz, 1 * sizeof(u64));
 }
 



