[llvm-commits] [compiler-rt] r169864 - in /compiler-rt/trunk/lib: asan/asan_allocator.cc asan/asan_allocator2.cc sanitizer_common/sanitizer_common.h

Kostya Serebryany kcc at google.com
Tue Dec 11 06:41:32 PST 2012


Author: kcc
Date: Tue Dec 11 08:41:31 2012
New Revision: 169864

URL: http://llvm.org/viewvc/llvm-project?rev=169864&view=rev
Log:
[asan] more code for asan_allocator2: basic Allocate/Deallocate code

Modified:
    compiler-rt/trunk/lib/asan/asan_allocator.cc
    compiler-rt/trunk/lib/asan/asan_allocator2.cc
    compiler-rt/trunk/lib/sanitizer_common/sanitizer_common.h

Modified: compiler-rt/trunk/lib/asan/asan_allocator.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_allocator.cc?rev=169864&r1=169863&r2=169864&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_allocator.cc (original)
+++ compiler-rt/trunk/lib/asan/asan_allocator.cc Tue Dec 11 08:41:31 2012
@@ -57,11 +57,6 @@
 static const uptr kMaxAllowedMallocSize =
     (SANITIZER_WORDSIZE == 32) ? 3UL << 30 : 8UL << 30;
 
-static inline bool IsAligned(uptr a, uptr alignment) {
-  return (a & (alignment - 1)) == 0;
-}
-
-
 static inline uptr SizeClassToSize(u8 size_class) {
   CHECK(size_class < kNumberOfSizeClasses);
   if (size_class <= kMallocSizeClassStepLog) {

Modified: compiler-rt/trunk/lib/asan/asan_allocator2.cc
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/asan_allocator2.cc?rev=169864&r1=169863&r2=169864&view=diff
==============================================================================
--- compiler-rt/trunk/lib/asan/asan_allocator2.cc (original)
+++ compiler-rt/trunk/lib/asan/asan_allocator2.cc Tue Dec 11 08:41:31 2012
@@ -18,6 +18,8 @@
 #include "asan_allocator.h"
 #if ASAN_ALLOCATOR_VERSION == 2
 
+#include "asan_thread.h"
+#include "asan_thread_registry.h"
 #include "sanitizer/asan_interface.h"
 #include "sanitizer_common/sanitizer_allocator.h"
 #include "sanitizer_common/sanitizer_internal_defs.h"
@@ -40,15 +42,158 @@
 typedef CombinedAllocator<PrimaryAllocator, AllocatorCache,
     SecondaryAllocator> Allocator;
 
+static THREADLOCAL AllocatorCache cache;
+static Allocator allocator;
 
-uptr AsanChunkView::Beg() { return 0; }
+static const uptr kMaxAllowedMallocSize =
+    (SANITIZER_WORDSIZE == 32) ? 3UL << 30 : 8UL << 30;
+
+static int inited = 0;
+
+static void Init() {
+  if (inited) return;
+  __asan_init();
+  inited = true;  // this must happen before any threads are created.
+  allocator.Init();
+}
+
+// Every chunk of memory allocated by this allocator can be in one of 3 states:
+// CHUNK_AVAILABLE: the chunk is in the free list and ready to be allocated.
+// CHUNK_ALLOCATED: the chunk is allocated and not yet freed.
+// CHUNK_QUARANTINE: the chunk was freed and put into quarantine zone.
+enum {
+  CHUNK_AVAILABLE  = 1,
+  CHUNK_ALLOCATED  = 2,
+  CHUNK_QUARANTINE = 3
+};
+
+// The memory chunk allocated from the underlying allocator looks like this:
+// L L L L L L H H U U U U U U R R
+//   L -- left redzone words (0 or more bytes)
+//   H -- ChunkHeader (16 bytes on 64-bit arch, 8 bytes on 32-bit arch).
+//     ChunkHeader is also a part of the left redzone.
+//   U -- user memory.
+//   R -- right redzone (0 or more bytes)
+// ChunkBase consists of ChunkHeader and other bytes that overlap with user
+// memory.
+
+#if SANITIZER_WORDSIZE == 64
+struct ChunkBase {
+  // 1-st 8 bytes.
+  uptr chunk_state       : 8;  // Must be first.
+  uptr alloc_tid         : 24;
+  uptr free_tid          : 24;
+  uptr from_memalign     : 1;
+  // 2-nd 8 bytes
+  uptr user_requested_size;
+  // End of ChunkHeader.
+  // 3-rd 8 bytes. These overlap with the user memory.
+  AsanChunk *next;
+};
+
+static const uptr kChunkHeaderSize = 16;
+COMPILER_CHECK(sizeof(ChunkBase) == 24);
+
+#elif SANITIZER_WORDSIZE == 32
+struct ChunkBase {
+  // 1-st 8 bytes.
+  uptr chunk_state       : 8;  // Must be first.
+  uptr from_memalign     : 1;
+  uptr alloc_tid         : 23;
+  uptr user_requested_size;
+  // End of ChunkHeader.
+  // 2-nd 8 bytes. These overlap with the user memory.
+  AsanChunk *next;
+  uptr  free_tid;
+};
+
+COMPILER_CHECK(sizeof(ChunkBase) == 16);
+static const uptr kChunkHeaderSize = 8;
+#endif
+
+struct AsanChunk: ChunkBase {
+  uptr Beg() { return reinterpret_cast<uptr>(this) + kChunkHeaderSize; }
+  uptr UsedSize() { return user_requested_size; }
+};
+
+uptr AsanChunkView::Beg() { return chunk_->Beg(); }
 uptr AsanChunkView::End() { return Beg() + UsedSize(); }
-uptr AsanChunkView::UsedSize() { return 0; }
-uptr AsanChunkView::AllocTid() { return 0; }
-uptr AsanChunkView::FreeTid() { return 0; }
+uptr AsanChunkView::UsedSize() { return chunk_->UsedSize(); }
+uptr AsanChunkView::AllocTid() { return chunk_->alloc_tid; }
+uptr AsanChunkView::FreeTid() { return chunk_->free_tid; }
+
+void AsanChunkView::GetAllocStack(StackTrace *stack) {
+  stack->size = 0;
+}
+
+void AsanChunkView::GetFreeStack(StackTrace *stack) {
+  stack->size = 0;
+}
+
+static const uptr kReturnOnZeroMalloc = 0x0123;  // Zero page is protected.
+
+static uptr ComputeRZSize(uptr user_requested_size) {
+  // FIXME: implement adaptive redzones.
+  return flags()->redzone;
+}
+
+static void *Allocate(uptr size, uptr alignment, StackTrace *stack) {
+  Init();
+  CHECK(stack);
+  if (alignment < 8) alignment = 8;
+  if (size == 0)
+    return reinterpret_cast<void *>(kReturnOnZeroMalloc);
+  CHECK(IsPowerOfTwo(alignment));
+  uptr rz_size = ComputeRZSize(size);
+  uptr rounded_size = RoundUpTo(size, rz_size);
+  uptr needed_size = rounded_size + rz_size;
+  if (alignment > rz_size)
+    needed_size += alignment;
+  CHECK(IsAligned(needed_size, rz_size));
+  if (size > kMaxAllowedMallocSize || needed_size > kMaxAllowedMallocSize) {
+    Report("WARNING: AddressSanitizer failed to allocate %p bytes\n",
+           (void*)size);
+    return 0;
+  }
+
+  AsanThread *t = asanThreadRegistry().GetCurrent();
+  void *allocated = allocator.Allocate(&cache, needed_size, 8, false);
+  uptr alloc_beg = reinterpret_cast<uptr>(allocated);
+  uptr alloc_end = alloc_beg + needed_size;
+  uptr beg_plus_redzone = alloc_beg + rz_size;
+  uptr user_beg = beg_plus_redzone;
+  if (!IsAligned(user_beg, alignment))
+    user_beg = RoundUpTo(user_beg, alignment);
+  uptr user_end = user_beg + size;
+  CHECK_LE(user_end, alloc_end);
+  uptr chunk_beg = user_beg - kChunkHeaderSize;
+//  Printf("allocated: %p beg_plus_redzone %p chunk_beg %p\n",
+//         allocated, beg_plus_redzone, chunk_beg);
+  AsanChunk *m = reinterpret_cast<AsanChunk *>(chunk_beg);
+  m->chunk_state = CHUNK_ALLOCATED;
+  u32 alloc_tid = t ? t->tid() : 0;
+  m->alloc_tid = alloc_tid;
+  CHECK_EQ(alloc_tid, m->alloc_tid);  // Does alloc_tid fit into the bitfield?
+  m->from_memalign = user_beg != beg_plus_redzone;
+  m->user_requested_size = size;
+
+  void *res = reinterpret_cast<void *>(user_beg);
+  ASAN_MALLOC_HOOK(res, size);
+  return res;
+}
+
+static void Deallocate(void *ptr, StackTrace *stack) {
+  uptr p = reinterpret_cast<uptr>(ptr);
+  if (p == 0 || p == kReturnOnZeroMalloc) return;
+  uptr chunk_beg = p - kChunkHeaderSize;
+  AsanChunk *m = reinterpret_cast<AsanChunk *>(chunk_beg);
+  uptr alloc_beg = p - ComputeRZSize(m->user_requested_size);
+  if (m->from_memalign)
+    alloc_beg = reinterpret_cast<uptr>(allocator.GetBlockBegin(ptr));
+  ASAN_FREE_HOOK(ptr);
+  allocator.Deallocate(&cache, reinterpret_cast<void *>(alloc_beg));
+}
 
-void AsanChunkView::GetAllocStack(StackTrace *stack) { }
-void AsanChunkView::GetFreeStack(StackTrace *stack) { }
 AsanChunkView FindHeapChunkByAddress(uptr address) {
   UNIMPLEMENTED();
   return AsanChunkView(0);
@@ -60,20 +205,18 @@
 
 SANITIZER_INTERFACE_ATTRIBUTE
 void *asan_memalign(uptr alignment, uptr size, StackTrace *stack) {
-  UNIMPLEMENTED();
-  return 0;
+  return Allocate(size, alignment, stack);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
 void asan_free(void *ptr, StackTrace *stack) {
-  UNIMPLEMENTED();
+  Deallocate(ptr, stack);
   return;
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
 void *asan_malloc(uptr size, StackTrace *stack) {
-  UNIMPLEMENTED();
-  return 0;
+  return Allocate(size, 8, stack);
 }
 
 void *asan_calloc(uptr nmemb, uptr size, StackTrace *stack) {

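For readers following the new Allocate() path in the hunk above, here is a minimal standalone sketch of the redzone/alignment arithmetic it performs. Variable names mirror the patch; the fixed redzone value (standing in for flags()->redzone and ComputeRZSize) and the 16-byte header (the 64-bit kChunkHeaderSize) are assumptions for illustration, not a reimplementation of the allocator:

    #include <cassert>
    #include <cstdio>

    typedef unsigned long uptr;

    static bool IsPowerOfTwo(uptr x) { return (x & (x - 1)) == 0; }
    static bool IsAligned(uptr a, uptr alignment) { return (a & (alignment - 1)) == 0; }
    static uptr RoundUpTo(uptr size, uptr boundary) {
      assert(IsPowerOfTwo(boundary));
      return (size + boundary - 1) & ~(boundary - 1);
    }

    // Hypothetical stand-ins for flags()->redzone and the 64-bit kChunkHeaderSize.
    static const uptr kRedzone = 128;
    static const uptr kChunkHeaderSize = 16;

    int main() {
      uptr size = 100, alignment = 32;
      uptr rz_size = kRedzone;                       // ComputeRZSize(size) in the patch
      uptr rounded_size = RoundUpTo(size, rz_size);  // user part padded to redzone granularity
      uptr needed_size = rounded_size + rz_size;     // plus the left redzone
      if (alignment > rz_size)
        needed_size += alignment;                    // room to slide user_beg for large alignments

      // Pretend the underlying allocator returned this block.
      uptr alloc_beg = 0x10000000;
      uptr user_beg = alloc_beg + rz_size;           // first byte after the left redzone
      if (!IsAligned(user_beg, alignment))
        user_beg = RoundUpTo(user_beg, alignment);
      uptr chunk_beg = user_beg - kChunkHeaderSize;  // ChunkHeader lives inside the left redzone

      printf("needed_size=%lu user_beg=%#lx chunk_beg=%#lx\n",
             needed_size, user_beg, chunk_beg);
      assert(user_beg + size <= alloc_beg + needed_size);  // mirrors CHECK_LE(user_end, alloc_end)
      return 0;
    }

This matches the "L L H H U U R R" layout described in the comment block: the left redzone (including the ChunkHeader) precedes user memory, and the padding introduced by rounding forms the right redzone.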
Modified: compiler-rt/trunk/lib/sanitizer_common/sanitizer_common.h
URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/sanitizer_common/sanitizer_common.h?rev=169864&r1=169863&r2=169864&view=diff
==============================================================================
--- compiler-rt/trunk/lib/sanitizer_common/sanitizer_common.h (original)
+++ compiler-rt/trunk/lib/sanitizer_common/sanitizer_common.h Tue Dec 11 08:41:31 2012
@@ -160,6 +160,9 @@
   CHECK(IsPowerOfTwo(boundary));
   return (size + boundary - 1) & ~(boundary - 1);
 }
+INLINE bool IsAligned(uptr a, uptr alignment) {
+  return (a & (alignment - 1)) == 0;
+}
 // Don't use std::min, std::max or std::swap, to minimize dependency
 // on libstdc++.
 template<class T> T Min(T a, T b) { return a < b ? a : b; }




