[llvm-commits] [compiler-rt] r170707 - in /compiler-rt/trunk/lib/tsan/rtl: tsan_interface_java.cc tsan_mutex.h tsan_rtl.h tsan_stat.h tsan_sync.cc tsan_sync.h
Dmitry Vyukov
dvyukov at google.com
Thu Dec 20 09:29:34 PST 2012
Author: dvyukov
Date: Thu Dec 20 11:29:34 2012
New Revision: 170707
URL: http://llvm.org/viewvc/llvm-project?rev=170707&view=rev
Log:
tsan: java interface implementation skeleton
Modified:
compiler-rt/trunk/lib/tsan/rtl/tsan_interface_java.cc
compiler-rt/trunk/lib/tsan/rtl/tsan_mutex.h
compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.h
compiler-rt/trunk/lib/tsan/rtl/tsan_stat.h
compiler-rt/trunk/lib/tsan/rtl/tsan_sync.cc
compiler-rt/trunk/lib/tsan/rtl/tsan_sync.h
Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_interface_java.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_interface_java.cc?rev=170707&r1=170706&r2=170707&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_interface_java.cc (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_interface_java.cc Thu Dec 20 11:29:34 2012
@@ -13,34 +13,261 @@
#include "tsan_interface_java.h"
#include "tsan_rtl.h"
+#include "tsan_mutex.h"
+#include "sanitizer_common/sanitizer_internal_defs.h"
+#include "sanitizer_common/sanitizer_common.h"
+#include "sanitizer_common/sanitizer_placement_new.h"
using namespace __tsan; // NOLINT
+namespace __tsan {
+
+const uptr kHeapShadow = 0x300000000000ull;
+const uptr kHeapAlignment = 8;
+
+struct BlockDesc {
+  bool begin;
+  Mutex mtx;
+  SyncVar *head;
+
+  BlockDesc()
+      : mtx(MutexTypeJava, StatMtxJava)
+      , head() {
+    CHECK_EQ(begin, false);
+    begin = true;
+  }
+
+  explicit BlockDesc(BlockDesc *b)
+      : mtx(MutexTypeJava, StatMtxJava)
+      , head(b->head) {
+    CHECK_EQ(begin, false);
+    begin = true;
+    b->head = 0;
+  }
+
+  ~BlockDesc() {
+    CHECK_EQ(begin, true);
+    begin = false;
+    ThreadState *thr = cur_thread();
+    SyncVar *s = head;
+    while (s) {
+      SyncVar *s1 = s->next;
+      StatInc(thr, StatSyncDestroyed);
+      s->mtx.Lock();
+      s->mtx.Unlock();
+      thr->mset.Remove(s->GetId());
+      DestroyAndFree(s);
+      s = s1;
+    }
+  }
+};
+
+struct JavaContext {
+  Mutex mtx;
+  const uptr heap_begin;
+  const uptr heap_size;
+  BlockDesc *heap_shadow;
+
+  JavaContext(jptr heap_begin, jptr heap_size)
+      : mtx(MutexTypeJava, StatMtxJava)
+      , heap_begin(heap_begin)
+      , heap_size(heap_size) {
+    uptr size = heap_size / kHeapAlignment * sizeof(BlockDesc);
+    heap_shadow = (BlockDesc*)MmapFixedNoReserve(kHeapShadow, size);
+    if ((uptr)heap_shadow != kHeapShadow) {
+      Printf("ThreadSanitizer: failed to mmap Java heap shadow\n");
+      Die();
+    }
+  }
+};
+
+class ScopedJavaFunc {
+ public:
+  ScopedJavaFunc(ThreadState *thr, uptr pc)
+      : thr_(thr) {
+    Initialize(thr_);
+    FuncEntry(thr, pc);
+    CHECK_EQ(thr_->in_rtl, 0);
+    thr_->in_rtl++;
+  }
+
+  ~ScopedJavaFunc() {
+    thr_->in_rtl--;
+    CHECK_EQ(thr_->in_rtl, 0);
+    FuncExit(thr_);
+    // FIXME(dvyukov): process pending signals.
+  }
+
+ private:
+  ThreadState *thr_;
+};
+
+static u64 jctx_buf[sizeof(JavaContext) / sizeof(u64) + 1];
+static JavaContext *jctx;
+
+static BlockDesc *getblock(uptr addr) {
+  uptr i = (addr - jctx->heap_begin) / kHeapAlignment;
+  return &jctx->heap_shadow[i];
+}
+
+static BlockDesc *getblockbegin(uptr addr) {
+  for (BlockDesc *b = getblock(addr);; b--) {
+    CHECK_GE(b, jctx->heap_shadow);
+    if (b->begin)
+      return b;
+  }
+  return 0;
+}
+
+SyncVar* GetJavaSync(ThreadState *thr, uptr pc, uptr addr,
+                     bool write_lock, bool create) {
+  if (jctx == 0 || addr < jctx->heap_begin
+      || addr >= jctx->heap_begin + jctx->heap_size)
+    return 0;
+  BlockDesc *b = getblockbegin(addr);
+  Lock l(&b->mtx);
+  SyncVar *s = b->head;
+  for (; s; s = s->next) {
+    if (s->addr == addr)
+      break;
+  }
+  if (s == 0 && create) {
+    s = CTX()->synctab.Create(thr, pc, addr);
+    s->next = b->head;
+    b->head = s;
+  }
+  if (s) {
+    if (write_lock)
+      s->mtx.Lock();
+    else
+      s->mtx.ReadLock();
+  }
+  return s;
+}
+
+SyncVar* GetAndRemoveJavaSync(ThreadState *thr, uptr pc, uptr addr) {
+  // We do not destroy Java mutexes other than in __tsan_java_free().
+  return 0;
+}
+
+}  // namespace __tsan {
+
+#define SCOPED_JAVA_FUNC(func) \
+  ThreadState *thr = cur_thread(); \
+  const uptr caller_pc = GET_CALLER_PC(); \
+  const uptr pc = (uptr)&func; \
+  (void)pc; \
+  ScopedJavaFunc scoped(thr, caller_pc); \
+/**/
+
void __tsan_java_init(jptr heap_begin, jptr heap_size) {
+  SCOPED_JAVA_FUNC(__tsan_java_init);
+  DPrintf("#%d: java_init(%p, %p)\n", thr->tid, heap_begin, heap_size);
+  CHECK_EQ(jctx, 0);
+  CHECK_GT(heap_begin, 0);
+  CHECK_GT(heap_size, 0);
+  CHECK_LT(heap_begin, heap_begin + heap_size);
+  jctx = new(jctx_buf) JavaContext(heap_begin, heap_size);
}

int __tsan_java_fini() {
-  return 0;
+  SCOPED_JAVA_FUNC(__tsan_java_fini);
+  DPrintf("#%d: java_fini()\n", thr->tid);
+  CHECK_NE(jctx, 0);
+  // FIXME(dvyukov): this does not call atexit() callbacks.
+  int status = Finalize(thr);
+  DPrintf("#%d: java_fini() = %d\n", thr->tid, status);
+  return status;
}

void __tsan_java_alloc(jptr ptr, jptr size) {
+  SCOPED_JAVA_FUNC(__tsan_java_alloc);
+  DPrintf("#%d: java_alloc(%p, %p)\n", thr->tid, ptr, size);
+  CHECK_NE(jctx, 0);
+  CHECK_NE(size, 0);
+  CHECK_GE(ptr, jctx->heap_begin);
+  CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);
+
+  BlockDesc *b = getblock(ptr);
+  new(b) BlockDesc();
}

void __tsan_java_free(jptr ptr, jptr size) {
+  SCOPED_JAVA_FUNC(__tsan_java_free);
+  DPrintf("#%d: java_free(%p, %p)\n", thr->tid, ptr, size);
+  CHECK_NE(jctx, 0);
+  CHECK_NE(size, 0);
+  CHECK_GE(ptr, jctx->heap_begin);
+  CHECK_LE(ptr + size, jctx->heap_begin + jctx->heap_size);
+
+  BlockDesc *beg = getblock(ptr);
+  BlockDesc *end = getblock(ptr + size);
+  for (BlockDesc *b = beg; b != end; b++) {
+    if (b->begin)
+      b->~BlockDesc();
+  }
}

void __tsan_java_move(jptr src, jptr dst, jptr size) {
+  SCOPED_JAVA_FUNC(__tsan_java_move);
+  DPrintf("#%d: java_move(%p, %p, %p)\n", thr->tid, src, dst, size);
+  CHECK_NE(jctx, 0);
+  CHECK_NE(size, 0);
+  CHECK_GE(src, jctx->heap_begin);
+  CHECK_LE(src + size, jctx->heap_begin + jctx->heap_size);
+  CHECK_GE(dst, jctx->heap_begin);
+  CHECK_LE(dst + size, jctx->heap_begin + jctx->heap_size);
+  CHECK(dst >= src + size || src >= dst + size);
+
+  // Assuming it's not running concurrently with threads that do
+  // memory accesses and mutex operations (stop-the-world phase).
+  BlockDesc *srcbeg = getblock(src);
+  BlockDesc *dstbeg = getblock(dst);
+  BlockDesc *srcend = getblock(src + size);
+  for (BlockDesc *s = srcbeg, *d = dstbeg; s != srcend; s++, d++) {
+    if (s->begin) {
+      new(d) BlockDesc(s);
+      s->~BlockDesc();
+    }
+  }
}

void __tsan_java_mutex_lock(jptr addr) {
+  SCOPED_JAVA_FUNC(__tsan_java_mutex_lock);
+  DPrintf("#%d: java_mutex_lock(%p)\n", thr->tid, addr);
+  CHECK_NE(jctx, 0);
+  CHECK_GE(addr, jctx->heap_begin);
+  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
+
+  MutexLock(thr, pc, addr);
}

void __tsan_java_mutex_unlock(jptr addr) {
+  SCOPED_JAVA_FUNC(__tsan_java_mutex_unlock);
+  DPrintf("#%d: java_mutex_unlock(%p)\n", thr->tid, addr);
+  CHECK_NE(jctx, 0);
+  CHECK_GE(addr, jctx->heap_begin);
+  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
+
+  MutexUnlock(thr, pc, addr);
}

void __tsan_java_mutex_read_lock(jptr addr) {
+  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_lock);
+  DPrintf("#%d: java_mutex_read_lock(%p)\n", thr->tid, addr);
+  CHECK_NE(jctx, 0);
+  CHECK_GE(addr, jctx->heap_begin);
+  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
+
+  MutexReadLock(thr, pc, addr);
}

void __tsan_java_mutex_read_unlock(jptr addr) {
-}
+  SCOPED_JAVA_FUNC(__tsan_java_mutex_read_unlock);
+  DPrintf("#%d: java_mutex_read_unlock(%p)\n", thr->tid, addr);
+  CHECK_NE(jctx, 0);
+  CHECK_GE(addr, jctx->heap_begin);
+  CHECK_LT(addr, jctx->heap_begin + jctx->heap_size);
+  MutexReadUnlock(thr, pc, addr);
+}
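
For readers new to this interface, here is a minimal, hypothetical sketch (not part of the commit) of how an embedding JVM might drive the entry points added above. It assumes jptr is an unsigned pointer-sized integer as declared in tsan_interface_java.h, declares the externs by hand, and uses a static buffer as a stand-in for the managed heap; it only illustrates call order and must be linked against the tsan runtime to actually run.

// Hypothetical usage sketch of the __tsan_java_* entry points.
#include <stdint.h>

typedef uintptr_t jptr;  // assumption: matches the runtime's jptr

extern "C" {
void __tsan_java_init(jptr heap_begin, jptr heap_size);
int  __tsan_java_fini();
void __tsan_java_alloc(jptr ptr, jptr size);
void __tsan_java_free(jptr ptr, jptr size);
void __tsan_java_move(jptr src, jptr dst, jptr size);
void __tsan_java_mutex_lock(jptr addr);
void __tsan_java_mutex_unlock(jptr addr);
}

// 8-byte aligned stand-in for the managed heap.
static long long heap[1 << 20];

int main() {
  jptr begin = (jptr)heap;
  jptr size = sizeof(heap);
  __tsan_java_init(begin, size);   // once, at VM startup

  jptr obj = begin;                // GC allocates an object
  __tsan_java_alloc(obj, 64);

  __tsan_java_mutex_lock(obj);     // monitorenter on the object
  __tsan_java_mutex_unlock(obj);   // monitorexit

  jptr dst = begin + 1024;         // compacting GC moves the object
  __tsan_java_move(obj, dst, 64);

  __tsan_java_free(dst, 64);       // object dies
  return __tsan_java_fini();       // at VM shutdown
}

Note the shadow cost visible in JavaContext above: the runtime reserves heap_size / kHeapAlignment * sizeof(BlockDesc) bytes at 0x300000000000, i.e. one BlockDesc per 8 bytes of Java heap, and the heap range is fixed once __tsan_java_init() has run.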
Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_mutex.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_mutex.h?rev=170707&r1=170706&r2=170707&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_mutex.h (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_mutex.h Thu Dec 20 11:29:34 2012
@@ -30,6 +30,7 @@
  MutexTypeAnnotations,
  MutexTypeAtExit,
  MutexTypeMBlock,
+  MutexTypeJava,

  // This must be the last.
  MutexTypeCount
Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.h?rev=170707&r1=170706&r2=170707&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.h (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.h Thu Dec 20 11:29:34 2012
@@ -515,6 +515,10 @@
void Initialize(ThreadState *thr);
int Finalize(ThreadState *thr);

+SyncVar* GetJavaSync(ThreadState *thr, uptr pc, uptr addr,
+                     bool write_lock, bool create);
+SyncVar* GetAndRemoveJavaSync(ThreadState *thr, uptr pc, uptr addr);
+
void MemoryAccess(ThreadState *thr, uptr pc, uptr addr,
                  int kAccessSizeLog, bool kAccessIsWrite);
void MemoryAccessImpl(ThreadState *thr, uptr addr,
Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_stat.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_stat.h?rev=170707&r1=170706&r2=170707&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_stat.h (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_stat.h Thu Dec 20 11:29:34 2012
@@ -281,6 +281,7 @@
  StatMtxAnnotations,
  StatMtxAtExit,
  StatMtxMBlock,
+  StatMtxJava,

  // This must be the last.
  StatCnt
Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_sync.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_sync.cc?rev=170707&r1=170706&r2=170707&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_sync.cc (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_sync.cc Thu Dec 20 11:29:34 2012
@@ -57,9 +57,26 @@
  return GetAndLock(0, 0, addr, write_lock, false);
}

+SyncVar* SyncTab::Create(ThreadState *thr, uptr pc, uptr addr) {
+  StatInc(thr, StatSyncCreated);
+  void *mem = internal_alloc(MBlockSync, sizeof(SyncVar));
+  const u64 uid = atomic_fetch_add(&uid_gen_, 1, memory_order_relaxed);
+  SyncVar *res = new(mem) SyncVar(addr, uid);
+#ifndef TSAN_GO
+  res->creation_stack.ObtainCurrent(thr, pc);
+#endif
+  return res;
+}
+
SyncVar* SyncTab::GetAndLock(ThreadState *thr, uptr pc,
                             uptr addr, bool write_lock, bool create) {
#ifndef TSAN_GO
+  {  // NOLINT
+    SyncVar *res = GetJavaSync(thr, pc, addr, write_lock, create);
+    if (res)
+      return res;
+  }
+
  // Here we ask only PrimaryAllocator, because
  // SecondaryAllocator::PointerIsMine() is slow and we have fallback on
  // the hashmap anyway.
@@ -74,11 +91,7 @@
    if (res == 0) {
      if (!create)
        return 0;
-      StatInc(thr, StatSyncCreated);
-      void *mem = internal_alloc(MBlockSync, sizeof(SyncVar));
-      const u64 uid = atomic_fetch_add(&uid_gen_, 1, memory_order_relaxed);
-      res = new(mem) SyncVar(addr, uid);
-      res->creation_stack.ObtainCurrent(thr, pc);
+      res = Create(thr, pc, addr);
      res->next = b->head;
      b->head = res;
    }
@@ -113,13 +126,7 @@
      break;
  }
  if (res == 0) {
-    StatInc(thr, StatSyncCreated);
-    void *mem = internal_alloc(MBlockSync, sizeof(SyncVar));
-    const u64 uid = atomic_fetch_add(&uid_gen_, 1, memory_order_relaxed);
-    res = new(mem) SyncVar(addr, uid);
-#ifndef TSAN_GO
-    res->creation_stack.ObtainCurrent(thr, pc);
-#endif
+    res = Create(thr, pc, addr);
    res->next = p->val;
    p->val = res;
  }
@@ -133,6 +140,11 @@
SyncVar* SyncTab::GetAndRemove(ThreadState *thr, uptr pc, uptr addr) {
#ifndef TSAN_GO
+  {  // NOLINT
+    SyncVar *res = GetAndRemoveJavaSync(thr, pc, addr);
+    if (res)
+      return res;
+  }
  if (PrimaryAllocator::PointerIsMine((void*)addr)) {
    MBlock *b = user_mblock(thr, (void*)addr);
    SyncVar *res = 0;
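
The net effect of this change on SyncVar lookup: GetAndLock() and GetAndRemove() now consult the Java heap shadow before the existing C/C++ paths. A rough, self-contained model of the new dispatch order follows; the stub functions are placeholders standing in for GetJavaSync(), the PrimaryAllocator block path, and the hash table, not the runtime's real helpers.

// Simplified model of the lookup order in SyncTab::GetAndLock after r170707.
#include <cstdio>
#include <stdint.h>

struct SyncVar;  // stand-in for the runtime's SyncVar

// Stubs standing in for the three real lookups in tsan_sync.cc.
SyncVar* LookupJavaHeap(uintptr_t addr)    { (void)addr; return 0; }
SyncVar* LookupMallocBlock(uintptr_t addr) { (void)addr; return 0; }
SyncVar* LookupHashTable(uintptr_t addr)   { (void)addr; return 0; }

SyncVar* GetAndLockModel(uintptr_t addr) {
  if (SyncVar *s = LookupJavaHeap(addr))     // new in this patch: GetJavaSync()
    return s;
  if (SyncVar *s = LookupMallocBlock(addr))  // PrimaryAllocator-owned memory
    return s;
  return LookupHashTable(addr);              // fallback hash table
}

int main() {
  std::printf("%p\n", (void*)GetAndLockModel(0x1000));
  return 0;
}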
Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_sync.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_sync.h?rev=170707&r1=170706&r2=170707&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_sync.h (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_sync.h Thu Dec 20 11:29:34 2012
@@ -96,6 +96,8 @@
  // If the SyncVar does not exist, returns 0.
  SyncVar* GetAndRemove(ThreadState *thr, uptr pc, uptr addr);

+  SyncVar* Create(ThreadState *thr, uptr pc, uptr addr);
+
  uptr GetMemoryConsumption(uptr *nsync);

 private: