[compiler-rt] r220637 - Change StackDepot interface to use StackTrace more extensively
Alexey Samsonov
vonosmas at gmail.com
Sat Oct 25 23:23:08 PDT 2014
Author: samsonov
Date: Sun Oct 26 01:23:07 2014
New Revision: 220637
URL: http://llvm.org/viewvc/llvm-project?rev=220637&view=rev
Log:
Change StackDepot interface to use StackTrace more extensively
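The change replaces the (pointer, size) pair taken by the depot entry points with a StackTrace argument. As a minimal sketch of the new call pattern (not part of this commit; the frame values are made up and mirror the updated unit tests below):

  uptr frames[] = {0x401000, 0x401234, 0x401888};
  StackTrace st(frames, ARRAY_SIZE(frames));
  u32 id = StackDepotPut(st);             // was: StackDepotPut(frames, ARRAY_SIZE(frames))
  StackTrace stored = StackDepotGet(id);  // retrieval already returned a StackTrace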
Modified:
compiler-rt/trunk/lib/asan/asan_allocator2.cc
compiler-rt/trunk/lib/asan/asan_globals.cc
compiler-rt/trunk/lib/asan/asan_thread.cc
compiler-rt/trunk/lib/lsan/lsan_allocator.cc
compiler-rt/trunk/lib/lsan/lsan_common.cc
compiler-rt/trunk/lib/lsan/lsan_common_linux.cc
compiler-rt/trunk/lib/msan/msan.cc
compiler-rt/trunk/lib/msan/msan_allocator.cc
compiler-rt/trunk/lib/msan/msan_interceptors.cc
compiler-rt/trunk/lib/sanitizer_common/sanitizer_stackdepot.cc
compiler-rt/trunk/lib/sanitizer_common/sanitizer_stackdepot.h
compiler-rt/trunk/lib/sanitizer_common/sanitizer_stacktrace.h
compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_stackdepot_test.cc
compiler-rt/trunk/lib/tsan/dd/dd_rtl.cc
compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.cc
Modified: compiler-rt/trunk/lib/asan/asan_allocator2.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_allocator2.cc?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_allocator2.cc (original)
+++ compiler-rt/trunk/lib/asan/asan_allocator2.cc Sun Oct 26 01:23:07 2014
@@ -354,7 +354,7 @@ static void *Allocate(uptr size, uptr al
meta[1] = chunk_beg;
}
- m->alloc_context_id = StackDepotPut(stack->trace, stack->size);
+ m->alloc_context_id = StackDepotPut(*stack);
uptr size_rounded_down_to_granularity = RoundDownTo(size, SHADOW_GRANULARITY);
// Unpoison the bulk of the memory region.
@@ -423,7 +423,7 @@ static void QuarantineChunk(AsanChunk *m
CHECK_EQ(m->free_tid, kInvalidTid);
AsanThread *t = GetCurrentThread();
m->free_tid = t ? t->tid() : 0;
- m->free_context_id = StackDepotPut(stack->trace, stack->size);
+ m->free_context_id = StackDepotPut(*stack);
// Poison the region.
PoisonShadow(m->Beg(),
RoundUpTo(m->UsedSize(), SHADOW_GRANULARITY),
Modified: compiler-rt/trunk/lib/asan/asan_globals.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_globals.cc?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_globals.cc (original)
+++ compiler-rt/trunk/lib/asan/asan_globals.cc Sun Oct 26 01:23:07 2014
@@ -217,7 +217,7 @@ using namespace __asan; // NOLINT
void __asan_register_globals(__asan_global *globals, uptr n) {
if (!flags()->report_globals) return;
GET_STACK_TRACE_FATAL_HERE;
- u32 stack_id = StackDepotPut(stack.trace, stack.size);
+ u32 stack_id = StackDepotPut(stack);
BlockingMutexLock lock(&mu_for_globals);
if (!global_registration_site_vector)
global_registration_site_vector =
Modified: compiler-rt/trunk/lib/asan/asan_thread.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_thread.cc?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_thread.cc (original)
+++ compiler-rt/trunk/lib/asan/asan_thread.cc Sun Oct 26 01:23:07 2014
@@ -30,7 +30,7 @@ namespace __asan {
void AsanThreadContext::OnCreated(void *arg) {
CreateThreadContextArgs *args = static_cast<CreateThreadContextArgs*>(arg);
if (args->stack)
- stack_id = StackDepotPut(args->stack->trace, args->stack->size);
+ stack_id = StackDepotPut(*args->stack);
thread = args->thread;
thread->set_context(this);
}
Modified: compiler-rt/trunk/lib/lsan/lsan_allocator.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/lsan/lsan_allocator.cc?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/lsan/lsan_allocator.cc (original)
+++ compiler-rt/trunk/lib/lsan/lsan_allocator.cc Sun Oct 26 01:23:07 2014
@@ -63,7 +63,7 @@ static void RegisterAllocation(const Sta
ChunkMetadata *m = Metadata(p);
CHECK(m);
m->tag = DisabledInThisThread() ? kIgnored : kDirectlyLeaked;
- m->stack_trace_id = StackDepotPut(stack.trace, stack.size);
+ m->stack_trace_id = StackDepotPut(stack);
m->requested_size = size;
atomic_store(reinterpret_cast<atomic_uint8_t *>(m), 1, memory_order_relaxed);
}
Modified: compiler-rt/trunk/lib/lsan/lsan_common.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/lsan/lsan_common.cc?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/lsan/lsan_common.cc (original)
+++ compiler-rt/trunk/lib/lsan/lsan_common.cc Sun Oct 26 01:23:07 2014
@@ -371,8 +371,8 @@ static void CollectLeaksCb(uptr chunk, v
u32 stack_trace_id = 0;
if (resolution > 0) {
StackTrace stack = StackDepotGet(m.stack_trace_id());
- uptr size = Min(stack.size, resolution);
- stack_trace_id = StackDepotPut(stack.trace, size);
+ stack.size = Min(stack.size, resolution);
+ stack_trace_id = StackDepotPut(stack);
} else {
stack_trace_id = m.stack_trace_id();
}
Modified: compiler-rt/trunk/lib/lsan/lsan_common_linux.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/lsan/lsan_common_linux.cc?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/lsan/lsan_common_linux.cc (original)
+++ compiler-rt/trunk/lib/lsan/lsan_common_linux.cc Sun Oct 26 01:23:07 2014
@@ -94,11 +94,10 @@ void ProcessGlobalRegions(Frontier *fron
static uptr GetCallerPC(u32 stack_id, StackDepotReverseMap *map) {
CHECK(stack_id);
- uptr size = 0;
- const uptr *trace = map->Get(stack_id, &size);
+ StackTrace stack = map->Get(stack_id);
// The top frame is our malloc/calloc/etc. The next frame is the caller.
- if (size >= 2)
- return trace[1];
+ if (stack.size >= 2)
+ return stack.trace[1];
return 0;
}
Modified: compiler-rt/trunk/lib/msan/msan.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/msan/msan.cc?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/msan/msan.cc (original)
+++ compiler-rt/trunk/lib/msan/msan.cc Sun Oct 26 01:23:07 2014
@@ -280,7 +280,7 @@ u32 ChainOrigin(u32 id, StackTrace *stac
}
}
- StackDepotHandle h = StackDepotPut_WithHandle(stack->trace, stack->size);
+ StackDepotHandle h = StackDepotPut_WithHandle(*stack);
if (!h.valid()) return id;
if (flags()->origin_history_per_stack_limit > 0) {
Modified: compiler-rt/trunk/lib/msan/msan_allocator.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/msan/msan_allocator.cc?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/msan/msan_allocator.cc (original)
+++ compiler-rt/trunk/lib/msan/msan_allocator.cc Sun Oct 26 01:23:07 2014
@@ -102,7 +102,7 @@ static void *MsanAllocate(StackTrace *st
} else if (flags()->poison_in_malloc) {
__msan_poison(allocated, size);
if (__msan_get_track_origins()) {
- u32 stack_id = StackDepotPut(stack->trace, stack->size);
+ u32 stack_id = StackDepotPut(*stack);
CHECK(stack_id);
u32 id;
ChainedOriginDepotPut(stack_id, Origin::kHeapRoot, &id);
@@ -125,7 +125,7 @@ void MsanDeallocate(StackTrace *stack, v
if (flags()->poison_in_free) {
__msan_poison(p, size);
if (__msan_get_track_origins()) {
- u32 stack_id = StackDepotPut(stack->trace, stack->size);
+ u32 stack_id = StackDepotPut(*stack);
CHECK(stack_id);
u32 id;
ChainedOriginDepotPut(stack_id, Origin::kHeapRoot, &id);
Modified: compiler-rt/trunk/lib/msan/msan_interceptors.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/msan/msan_interceptors.cc?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/msan/msan_interceptors.cc (original)
+++ compiler-rt/trunk/lib/msan/msan_interceptors.cc Sun Oct 26 01:23:07 2014
@@ -842,7 +842,7 @@ void __msan_allocated_memory(const void*
if (flags()->poison_in_malloc)
__msan_poison(data, size);
if (__msan_get_track_origins()) {
- u32 stack_id = StackDepotPut(stack.trace, stack.size);
+ u32 stack_id = StackDepotPut(stack);
u32 id;
ChainedOriginDepotPut(stack_id, Origin::kHeapRoot, &id);
__msan_set_origin(data, size, Origin(id, 1).raw_id());
Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_stackdepot.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_stackdepot.cc?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_stackdepot.cc (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_stackdepot.cc Sun Oct 26 01:23:07 2014
@@ -18,32 +18,6 @@
namespace __sanitizer {
-// FIXME: Get rid of this class in favor of StackTrace.
-struct StackDepotDesc {
- const uptr *stack;
- uptr size;
- u32 hash() const {
- // murmur2
- const u32 m = 0x5bd1e995;
- const u32 seed = 0x9747b28c;
- const u32 r = 24;
- u32 h = seed ^ (size * sizeof(uptr));
- for (uptr i = 0; i < size; i++) {
- u32 k = stack[i];
- k *= m;
- k ^= k >> r;
- k *= m;
- h *= m;
- h ^= k;
- }
- h ^= h >> 13;
- h *= m;
- h ^= h >> 15;
- return h;
- }
- bool is_valid() { return size > 0 && stack; }
-};
-
struct StackDepotNode {
StackDepotNode *link;
u32 id;
@@ -59,14 +33,14 @@ struct StackDepotNode {
static const u32 kUseCountMask = (1 << kUseCountBits) - 1;
static const u32 kHashMask = ~kUseCountMask;
- typedef StackDepotDesc args_type;
+ typedef StackTrace args_type;
bool eq(u32 hash, const args_type &args) const {
u32 hash_bits =
atomic_load(&hash_and_use_count, memory_order_relaxed) & kHashMask;
if ((hash & kHashMask) != hash_bits || args.size != size) return false;
uptr i = 0;
for (; i < size; i++) {
- if (stack[i] != args.stack[i]) return false;
+ if (stack[i] != args.trace[i]) return false;
}
return true;
}
@@ -76,11 +50,10 @@ struct StackDepotNode {
void store(const args_type &args, u32 hash) {
atomic_store(&hash_and_use_count, hash & kHashMask, memory_order_relaxed);
size = args.size;
- internal_memcpy(stack, args.stack, size * sizeof(uptr));
+ internal_memcpy(stack, args.trace, size * sizeof(uptr));
}
args_type load() const {
- args_type ret = {&stack[0], size};
- return ret;
+ return args_type(&stack[0], size);
}
StackDepotHandle get_handle() { return StackDepotHandle(this); }
@@ -100,8 +73,6 @@ void StackDepotHandle::inc_use_count_uns
StackDepotNode::kUseCountMask;
CHECK_LT(prev + 1, StackDepotNode::kMaxUseCount);
}
-uptr StackDepotHandle::size() { return node_->size; }
-uptr *StackDepotHandle::stack() { return &node_->stack[0]; }
// FIXME(dvyukov): this single reserved bit is used in TSan.
typedef StackDepotBase<StackDepotNode, 1, StackDepotNode::kTabSizeLog>
@@ -112,20 +83,17 @@ StackDepotStats *StackDepotGetStats() {
return theDepot.GetStats();
}
-u32 StackDepotPut(const uptr *stack, uptr size) {
- StackDepotDesc desc = {stack, size};
- StackDepotHandle h = theDepot.Put(desc);
+u32 StackDepotPut(StackTrace stack) {
+ StackDepotHandle h = theDepot.Put(stack);
return h.valid() ? h.id() : 0;
}
-StackDepotHandle StackDepotPut_WithHandle(const uptr *stack, uptr size) {
- StackDepotDesc desc = {stack, size};
- return theDepot.Put(desc);
+StackDepotHandle StackDepotPut_WithHandle(StackTrace stack) {
+ return theDepot.Put(stack);
}
StackTrace StackDepotGet(u32 id) {
- StackDepotDesc desc = theDepot.Get(id);
- return StackTrace(desc.stack, desc.size);
+ return theDepot.Get(id);
}
void StackDepotLockAll() {
@@ -156,18 +124,15 @@ StackDepotReverseMap::StackDepotReverseM
InternalSort(&map_, map_.size(), IdDescPair::IdComparator);
}
-const uptr *StackDepotReverseMap::Get(u32 id, uptr *size) {
- if (!map_.size()) return 0;
+StackTrace StackDepotReverseMap::Get(u32 id) {
+ if (!map_.size())
+ return StackTrace();
IdDescPair pair = {id, 0};
uptr idx = InternalBinarySearch(map_, 0, map_.size(), pair,
IdDescPair::IdComparator);
- if (idx > map_.size()) {
- *size = 0;
- return 0;
- }
- StackDepotNode *desc = map_[idx].desc;
- *size = desc->size;
- return desc->stack;
+ if (idx > map_.size())
+ return StackTrace();
+ return map_[idx].desc->load();
}
} // namespace __sanitizer
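StackDepotReverseMap::Get now returns a StackTrace by value instead of filling an out-parameter, and yields a default-constructed (empty) trace on a miss. A hedged caller sketch, assuming a previously obtained stack_id and following the GetCallerPC change in lsan_common_linux.cc above:

  StackDepotReverseMap map;           // indexes the current depot contents by id
  StackTrace st = map.Get(stack_id);  // st.size == 0 if the id is not found
  if (st.size >= 2) {
    uptr caller_pc = st.trace[1];     // frame after the malloc/calloc/etc. wrapper
  }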
Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_stackdepot.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_stackdepot.h?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_stackdepot.h (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_stackdepot.h Sun Oct 26 01:23:07 2014
@@ -29,16 +29,13 @@ struct StackDepotHandle {
u32 id();
int use_count();
void inc_use_count_unsafe();
- uptr size();
- uptr *stack();
};
const int kStackDepotMaxUseCount = 1U << 20;
StackDepotStats *StackDepotGetStats();
-// FIXME: Pass StackTrace as an input argument here.
-u32 StackDepotPut(const uptr *stack, uptr size);
-StackDepotHandle StackDepotPut_WithHandle(const uptr *stack, uptr size);
+u32 StackDepotPut(StackTrace stack);
+StackDepotHandle StackDepotPut_WithHandle(StackTrace stack);
// Retrieves a stored stack trace by the id.
StackTrace StackDepotGet(u32 id);
@@ -52,7 +49,7 @@ void StackDepotUnlockAll();
class StackDepotReverseMap {
public:
StackDepotReverseMap();
- const uptr *Get(u32 id, uptr *size);
+ StackTrace Get(u32 id);
private:
struct IdDescPair {
Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_stacktrace.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_stacktrace.h?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_stacktrace.h (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_stacktrace.h Sun Oct 26 01:23:07 2014
@@ -33,11 +33,33 @@ struct StackTrace {
const uptr *trace;
uptr size;
+ StackTrace() : trace(nullptr), size(0) {}
StackTrace(const uptr *trace, uptr size) : trace(trace), size(size) {}
// Prints a symbolized stacktrace, followed by an empty line.
void Print() const;
+ u32 hash() const {
+ // murmur2
+ const u32 m = 0x5bd1e995;
+ const u32 seed = 0x9747b28c;
+ const u32 r = 24;
+ u32 h = seed ^ (size * sizeof(uptr));
+ for (uptr i = 0; i < size; i++) {
+ u32 k = trace[i];
+ k *= m;
+ k ^= k >> r;
+ k *= m;
+ h *= m;
+ h ^= k;
+ }
+ h ^= h >> 13;
+ h *= m;
+ h ^= h >> 15;
+ return h;
+ }
+ bool is_valid() const { return size > 0 && trace; }
+
static bool WillUseFastUnwind(bool request_fast_unwind) {
// Check if fast unwind is available. Fast unwind is the only option on Mac.
// It is also the only option on FreeBSD as the slow unwinding that
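The murmur2 hash and the validity check that previously lived on the internal StackDepotDesc (deleted from sanitizer_stackdepot.cc above) are now members of StackTrace itself, so the depot can hash its args_type directly. A small usage sketch under the same assumptions as the earlier example:

  uptr frames[] = {0x401000, 0x401234, 0x401888};
  StackTrace st(frames, ARRAY_SIZE(frames));
  if (st.is_valid()) {      // non-null trace pointer and non-zero size
    u32 h = st.hash();      // murmur2 over the frame PCs
    // The depot buckets and compares stored traces with this hash.
  }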
Modified: compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_stackdepot_test.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_stackdepot_test.cc?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_stackdepot_test.cc (original)
+++ compiler-rt/trunk/lib/sanitizer_common/tests/sanitizer_stackdepot_test.cc Sun Oct 26 01:23:07 2014
@@ -18,12 +18,13 @@
namespace __sanitizer {
TEST(SanitizerCommon, StackDepotBasic) {
- uptr s1[] = {1, 2, 3, 4, 5};
- u32 i1 = StackDepotPut(s1, ARRAY_SIZE(s1));
+ uptr array[] = {1, 2, 3, 4, 5};
+ StackTrace s1(array, ARRAY_SIZE(array));
+ u32 i1 = StackDepotPut(s1);
StackTrace stack = StackDepotGet(i1);
EXPECT_NE(stack.trace, (uptr*)0);
- EXPECT_EQ(ARRAY_SIZE(s1), stack.size);
- EXPECT_EQ(0, internal_memcmp(stack.trace, s1, sizeof(s1)));
+ EXPECT_EQ(ARRAY_SIZE(array), stack.size);
+ EXPECT_EQ(0, internal_memcmp(stack.trace, array, sizeof(array)));
}
TEST(SanitizerCommon, StackDepotAbsent) {
@@ -32,7 +33,7 @@ TEST(SanitizerCommon, StackDepotAbsent)
}
TEST(SanitizerCommon, StackDepotEmptyStack) {
- u32 i1 = StackDepotPut(0, 0);
+ u32 i1 = StackDepotPut(StackTrace());
StackTrace stack = StackDepotGet(i1);
EXPECT_EQ((uptr*)0, stack.trace);
}
@@ -43,44 +44,49 @@ TEST(SanitizerCommon, StackDepotZeroId)
}
TEST(SanitizerCommon, StackDepotSame) {
- uptr s1[] = {1, 2, 3, 4, 6};
- u32 i1 = StackDepotPut(s1, ARRAY_SIZE(s1));
- u32 i2 = StackDepotPut(s1, ARRAY_SIZE(s1));
+ uptr array[] = {1, 2, 3, 4, 6};
+ StackTrace s1(array, ARRAY_SIZE(array));
+ u32 i1 = StackDepotPut(s1);
+ u32 i2 = StackDepotPut(s1);
EXPECT_EQ(i1, i2);
StackTrace stack = StackDepotGet(i1);
EXPECT_NE(stack.trace, (uptr*)0);
- EXPECT_EQ(ARRAY_SIZE(s1), stack.size);
- EXPECT_EQ(0, internal_memcmp(stack.trace, s1, sizeof(s1)));
+ EXPECT_EQ(ARRAY_SIZE(array), stack.size);
+ EXPECT_EQ(0, internal_memcmp(stack.trace, array, sizeof(array)));
}
TEST(SanitizerCommon, StackDepotSeveral) {
- uptr s1[] = {1, 2, 3, 4, 7};
- u32 i1 = StackDepotPut(s1, ARRAY_SIZE(s1));
- uptr s2[] = {1, 2, 3, 4, 8, 9};
- u32 i2 = StackDepotPut(s2, ARRAY_SIZE(s2));
+ uptr array1[] = {1, 2, 3, 4, 7};
+ StackTrace s1(array1, ARRAY_SIZE(array1));
+ u32 i1 = StackDepotPut(s1);
+ uptr array2[] = {1, 2, 3, 4, 8, 9};
+ StackTrace s2(array2, ARRAY_SIZE(array2));
+ u32 i2 = StackDepotPut(s2);
EXPECT_NE(i1, i2);
}
TEST(SanitizerCommon, StackDepotReverseMap) {
- uptr s1[] = {1, 2, 3, 4, 5};
- uptr s2[] = {7, 1, 3, 0};
- uptr s3[] = {10, 2, 5, 3};
- uptr s4[] = {1, 3, 2, 5};
+ uptr array1[] = {1, 2, 3, 4, 5};
+ uptr array2[] = {7, 1, 3, 0};
+ uptr array3[] = {10, 2, 5, 3};
+ uptr array4[] = {1, 3, 2, 5};
u32 ids[4] = {0};
- ids[0] = StackDepotPut(s1, ARRAY_SIZE(s1));
- ids[1] = StackDepotPut(s2, ARRAY_SIZE(s2));
- ids[2] = StackDepotPut(s3, ARRAY_SIZE(s3));
- ids[3] = StackDepotPut(s4, ARRAY_SIZE(s4));
+ StackTrace s1(array1, ARRAY_SIZE(array1));
+ StackTrace s2(array2, ARRAY_SIZE(array2));
+ StackTrace s3(array3, ARRAY_SIZE(array3));
+ StackTrace s4(array4, ARRAY_SIZE(array4));
+ ids[0] = StackDepotPut(s1);
+ ids[1] = StackDepotPut(s2);
+ ids[2] = StackDepotPut(s3);
+ ids[3] = StackDepotPut(s4);
StackDepotReverseMap map;
for (uptr i = 0; i < 4; i++) {
- uptr sz_map;
- const uptr *sp_map;
StackTrace stack = StackDepotGet(ids[i]);
- sp_map = map.Get(ids[i], &sz_map);
- EXPECT_EQ(stack.size, sz_map);
- EXPECT_EQ(stack.trace, sp_map);
+ StackTrace from_map = map.Get(ids[i]);
+ EXPECT_EQ(stack.size, from_map.size);
+ EXPECT_EQ(stack.trace, from_map.trace);
}
}
Modified: compiler-rt/trunk/lib/tsan/dd/dd_rtl.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/dd/dd_rtl.cc?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/dd/dd_rtl.cc (original)
+++ compiler-rt/trunk/lib/tsan/dd/dd_rtl.cc Sun Oct 26 01:23:07 2014
@@ -19,13 +19,13 @@ namespace __dsan {
static Context *ctx;
static u32 CurrentStackTrace(Thread *thr, uptr skip) {
- BufferedStackTrace trace;
+ BufferedStackTrace stack;
thr->ignore_interceptors = true;
- trace.Unwind(1000, 0, 0, 0, 0, 0, false);
+ stack.Unwind(1000, 0, 0, 0, 0, 0, false);
thr->ignore_interceptors = false;
- if (trace.size <= skip)
+ if (stack.size <= skip)
return 0;
- return StackDepotPut(trace.trace + skip, trace.size - skip);
+ return StackDepotPut(StackTrace(stack.trace + skip, stack.size - skip));
}
static void PrintStackTrace(Thread *thr, u32 stk) {
Modified: compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.cc?rev=220637&r1=220636&r2=220637&view=diff
==============================================================================
--- compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.cc (original)
+++ compiler-rt/trunk/lib/tsan/rtl/tsan_rtl.cc Sun Oct 26 01:23:07 2014
@@ -462,8 +462,8 @@ u32 CurrentStackId(ThreadState *thr, upt
thr->shadow_stack_pos[0] = pc;
thr->shadow_stack_pos++;
}
- u32 id = StackDepotPut(thr->shadow_stack,
- thr->shadow_stack_pos - thr->shadow_stack);
+ u32 id = StackDepotPut(__sanitizer::StackTrace(
+ thr->shadow_stack, thr->shadow_stack_pos - thr->shadow_stack));
if (pc != 0)
thr->shadow_stack_pos--;
return id;