[compiler-rt] [compiler-rt][msan] intercept a subset of non standard FreeBSD's allo… (PR #81773)

David CARLIER via llvm-commits llvm-commits at lists.llvm.org
Wed Feb 14 10:20:11 PST 2024


https://github.com/devnexen created https://github.com/llvm/llvm-project/pull/81773

…cator api.

FreeBSD's allocator is jemalloc, so its userland uses a subset of jemalloc's non-standard API.

From b84c87fc980f6db28ae6207887003dccfa00c294 Mon Sep 17 00:00:00 2001
From: David Carlier <devnexen at gmail.com>
Date: Wed, 14 Feb 2024 18:16:28 +0000
Subject: [PATCH] [compiler-rt][msan] intercept a subset of non standard
 FreeBSD's allocator api.

FreeBSD's allocator is jemalloc, so its userland uses a subset of
jemalloc's non-standard API.
---
 compiler-rt/lib/msan/msan.h                |  6 +--
 compiler-rt/lib/msan/msan_allocator.cpp    | 27 ++++++----
 compiler-rt/lib/msan/msan_interceptors.cpp | 62 ++++++++++++++++++++--
 compiler-rt/lib/msan/msan_new_delete.cpp   |  4 +-
 4 files changed, 79 insertions(+), 20 deletions(-)

diff --git a/compiler-rt/lib/msan/msan.h b/compiler-rt/lib/msan/msan.h
index 710447a3e1a357..006d35146c0592 100644
--- a/compiler-rt/lib/msan/msan.h
+++ b/compiler-rt/lib/msan/msan.h
@@ -255,11 +255,11 @@ char *GetProcSelfMaps();
 void InitializeInterceptors();
 
 void MsanAllocatorInit();
-void MsanDeallocate(BufferedStackTrace *stack, void *ptr);
+void MsanDeallocate(BufferedStackTrace *stack, void *ptr, bool zeroise);
 
-void *msan_malloc(uptr size, BufferedStackTrace *stack);
+void *msan_malloc(uptr size, BufferedStackTrace *stack, bool zeroise);
 void *msan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack);
-void *msan_realloc(void *ptr, uptr size, BufferedStackTrace *stack);
+void *msan_realloc(void *ptr, uptr size, BufferedStackTrace *stack, bool zeroise);
 void *msan_reallocarray(void *ptr, uptr nmemb, uptr size,
                         BufferedStackTrace *stack);
 void *msan_valloc(uptr size, BufferedStackTrace *stack);
diff --git a/compiler-rt/lib/msan/msan_allocator.cpp b/compiler-rt/lib/msan/msan_allocator.cpp
index 0b2dd2b2f1883d..d686a0d14e3737 100644
--- a/compiler-rt/lib/msan/msan_allocator.cpp
+++ b/compiler-rt/lib/msan/msan_allocator.cpp
@@ -232,7 +232,7 @@ static void *MsanAllocate(BufferedStackTrace *stack, uptr size, uptr alignment,
   return allocated;
 }
 
-void MsanDeallocate(BufferedStackTrace *stack, void *p) {
+void MsanDeallocate(BufferedStackTrace *stack, void *p, bool zeroise) {
   CHECK(p);
   UnpoisonParam(1);
   RunFreeHooks(p);
@@ -250,6 +250,11 @@ void MsanDeallocate(BufferedStackTrace *stack, void *p) {
       Origin o = Origin::CreateHeapOrigin(stack);
       __msan_set_origin(p, size, o.raw_id());
     }
+  } else {
+    if (allocator.FromPrimary(p))
+      __msan_clear_and_unpoison(p, size);
+    else
+      __msan_unpoison(p, size);  // Mem is already zeroed.
   }
   MsanThread *t = GetCurrentThread();
   if (t) {
@@ -263,7 +268,7 @@ void MsanDeallocate(BufferedStackTrace *stack, void *p) {
 }
 
 static void *MsanReallocate(BufferedStackTrace *stack, void *old_p,
-                            uptr new_size, uptr alignment) {
+                            uptr new_size, uptr alignment, bool zeroise) {
   Metadata *meta = reinterpret_cast<Metadata*>(allocator.GetMetaData(old_p));
   uptr old_size = meta->requested_size;
   uptr actually_allocated_size = allocator.GetActuallyAllocatedSize(old_p);
@@ -279,10 +284,10 @@ static void *MsanReallocate(BufferedStackTrace *stack, void *old_p,
     return old_p;
   }
   uptr memcpy_size = Min(new_size, old_size);
-  void *new_p = MsanAllocate(stack, new_size, alignment, false /*zeroise*/);
+  void *new_p = MsanAllocate(stack, new_size, alignment, zeroise);
   if (new_p) {
     CopyMemory(new_p, old_p, memcpy_size, stack);
-    MsanDeallocate(stack, old_p);
+    MsanDeallocate(stack, old_p, zeroise);
   }
   return new_p;
 }
@@ -324,22 +329,22 @@ static uptr AllocationSizeFast(const void *p) {
   return reinterpret_cast<Metadata *>(allocator.GetMetaData(p))->requested_size;
 }
 
-void *msan_malloc(uptr size, BufferedStackTrace *stack) {
-  return SetErrnoOnNull(MsanAllocate(stack, size, sizeof(u64), false));
+void *msan_malloc(uptr size, BufferedStackTrace *stack, bool zeroise) {
+  return SetErrnoOnNull(MsanAllocate(stack, size, sizeof(u64), zeroise));
 }
 
 void *msan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) {
   return SetErrnoOnNull(MsanCalloc(stack, nmemb, size));
 }
 
-void *msan_realloc(void *ptr, uptr size, BufferedStackTrace *stack) {
+void *msan_realloc(void *ptr, uptr size, BufferedStackTrace *stack, bool zeroise) {
   if (!ptr)
-    return SetErrnoOnNull(MsanAllocate(stack, size, sizeof(u64), false));
+    return SetErrnoOnNull(MsanAllocate(stack, size, sizeof(u64), zeroise));
   if (size == 0) {
-    MsanDeallocate(stack, ptr);
+    MsanDeallocate(stack, ptr, zeroise);
     return nullptr;
   }
-  return SetErrnoOnNull(MsanReallocate(stack, ptr, size, sizeof(u64)));
+  return SetErrnoOnNull(MsanReallocate(stack, ptr, size, sizeof(u64), zeroise));
 }
 
 void *msan_reallocarray(void *ptr, uptr nmemb, uptr size,
@@ -351,7 +356,7 @@ void *msan_reallocarray(void *ptr, uptr nmemb, uptr size,
     GET_FATAL_STACK_TRACE_IF_EMPTY(stack);
     ReportReallocArrayOverflow(nmemb, size, stack);
   }
-  return msan_realloc(ptr, nmemb * size, stack);
+  return msan_realloc(ptr, nmemb * size, stack, false);
 }
 
 void *msan_valloc(uptr size, BufferedStackTrace *stack) {
diff --git a/compiler-rt/lib/msan/msan_interceptors.cpp b/compiler-rt/lib/msan/msan_interceptors.cpp
index 2c9f2c01e14b06..d1ed3a85166193 100644
--- a/compiler-rt/lib/msan/msan_interceptors.cpp
+++ b/compiler-rt/lib/msan/msan_interceptors.cpp
@@ -216,7 +216,7 @@ INTERCEPTOR(void, free, void *ptr) {
   if (DlsymAlloc::PointerIsMine(ptr))
     return DlsymAlloc::Free(ptr);
   GET_MALLOC_STACK_TRACE;
-  MsanDeallocate(&stack, ptr);
+  MsanDeallocate(&stack, ptr, false);
 }
 
 #if !SANITIZER_FREEBSD && !SANITIZER_NETBSD
@@ -226,7 +226,7 @@ INTERCEPTOR(void, cfree, void *ptr) {
   if (DlsymAlloc::PointerIsMine(ptr))
     return DlsymAlloc::Free(ptr);
   GET_MALLOC_STACK_TRACE;
-  MsanDeallocate(&stack, ptr);
+  MsanDeallocate(&stack, ptr, false);
 }
 #  define MSAN_MAYBE_INTERCEPT_CFREE INTERCEPT_FUNCTION(cfree)
 #else
@@ -294,6 +294,56 @@ INTERCEPTOR(void, malloc_stats, void) {
 #define MSAN_MAYBE_INTERCEPT_MALLOC_STATS
 #endif
 
+#if SANITIZER_FREEBSD
+
+// MALLOCX_ZERO is the only jemalloc flag worth replicating here.
+#define MSAN_MALLOCX_ZERO 0x40
+
+INTERCEPTOR(void *, mallocx, SIZE_T size, int flags) {
+  bool zeroise = (flags & MSAN_MALLOCX_ZERO);
+  if (DlsymAlloc::Use())  // pre-init path: MALLOCX_ZERO must yield zeroed mem
+    return zeroise ? DlsymAlloc::Callocate(1, size) : DlsymAlloc::Allocate(size);
+  GET_MALLOC_STACK_TRACE;
+  return msan_malloc(size, &stack, zeroise);
+}
+
+INTERCEPTOR(void *, rallocx, void *ptr, SIZE_T size, int flags) {
+  bool zeroise = (flags & MSAN_MALLOCX_ZERO);
+  if (DlsymAlloc::Use() || DlsymAlloc::PointerIsMine(ptr)) {
+    // MALLOCX_ZERO only guarantees zeroing of the *grown* region; zeroing all
+    // `size` bytes would clobber the contents realloc must preserve. The old
+    // size is unknown in this pre-init path, so skip zeroing (best effort).
+    return DlsymAlloc::Realloc(ptr, size);
+  }
+  GET_MALLOC_STACK_TRACE;
+  return msan_realloc(ptr, size, &stack, zeroise);
+}
+
+INTERCEPTOR(uptr, sallocx, void *ptr, int flags) {
+  (void)flags;
+  return __sanitizer_get_allocated_size(ptr);
+}
+
+INTERCEPTOR(void, dallocx, void *ptr, int flags) {
+  if (UNLIKELY(!ptr))
+    return;
+  if (DlsymAlloc::PointerIsMine(ptr))
+    return DlsymAlloc::Free(ptr);
+  GET_MALLOC_STACK_TRACE;
+  MsanDeallocate(&stack, ptr, (flags & MSAN_MALLOCX_ZERO));
+}
+
+#define MSAN_MAYBE_INTERCEPT_MALLOCX INTERCEPT_FUNCTION(mallocx)
+#define MSAN_MAYBE_INTERCEPT_RALLOCX INTERCEPT_FUNCTION(rallocx)
+#define MSAN_MAYBE_INTERCEPT_SALLOCX INTERCEPT_FUNCTION(sallocx)
+#define MSAN_MAYBE_INTERCEPT_DALLOCX INTERCEPT_FUNCTION(dallocx)
+#else
+#define MSAN_MAYBE_INTERCEPT_MALLOCX
+#define MSAN_MAYBE_INTERCEPT_RALLOCX
+#define MSAN_MAYBE_INTERCEPT_SALLOCX
+#define MSAN_MAYBE_INTERCEPT_DALLOCX
+#endif
+
 INTERCEPTOR(char *, strcpy, char *dest, const char *src) {
   ENSURE_MSAN_INITED();
   GET_STORE_STACK_TRACE;
@@ -1007,7 +1057,7 @@ INTERCEPTOR(void *, realloc, void *ptr, SIZE_T size) {
   if (DlsymAlloc::Use() || DlsymAlloc::PointerIsMine(ptr))
     return DlsymAlloc::Realloc(ptr, size);
   GET_MALLOC_STACK_TRACE;
-  return msan_realloc(ptr, size, &stack);
+  return msan_realloc(ptr, size, &stack, false);
 }
 
 INTERCEPTOR(void *, reallocarray, void *ptr, SIZE_T nmemb, SIZE_T size) {
@@ -1019,7 +1069,7 @@ INTERCEPTOR(void *, malloc, SIZE_T size) {
   if (DlsymAlloc::Use())
     return DlsymAlloc::Allocate(size);
   GET_MALLOC_STACK_TRACE;
-  return msan_malloc(size, &stack);
+  return msan_malloc(size, &stack, false);
 }
 
 void __msan_allocated_memory(const void *data, uptr size) {
@@ -1783,6 +1833,10 @@ void InitializeInterceptors() {
   MSAN_MAYBE_INTERCEPT_MALLINFO2;
   MSAN_MAYBE_INTERCEPT_MALLOPT;
   MSAN_MAYBE_INTERCEPT_MALLOC_STATS;
+  MSAN_MAYBE_INTERCEPT_MALLOCX;
+  MSAN_MAYBE_INTERCEPT_RALLOCX;
+  MSAN_MAYBE_INTERCEPT_SALLOCX;
+  MSAN_MAYBE_INTERCEPT_DALLOCX;
   INTERCEPT_FUNCTION(fread);
   MSAN_MAYBE_INTERCEPT_FREAD_UNLOCKED;
   INTERCEPT_FUNCTION(memccpy);
diff --git a/compiler-rt/lib/msan/msan_new_delete.cpp b/compiler-rt/lib/msan/msan_new_delete.cpp
index 7daa55474b7dae..a29c166d0ac46e 100644
--- a/compiler-rt/lib/msan/msan_new_delete.cpp
+++ b/compiler-rt/lib/msan/msan_new_delete.cpp
@@ -32,7 +32,7 @@ namespace std {
 // TODO(alekseys): throw std::bad_alloc instead of dying on OOM.
 #  define OPERATOR_NEW_BODY(nothrow)          \
     GET_MALLOC_STACK_TRACE;                   \
-    void *res = msan_malloc(size, &stack);    \
+    void *res = msan_malloc(size, &stack, false);    \
     if (!nothrow && UNLIKELY(!res)) {         \
       GET_FATAL_STACK_TRACE_IF_EMPTY(&stack); \
       ReportOutOfMemory(size, &stack);        \
@@ -74,7 +74,7 @@ void *operator new[](size_t size, std::align_val_t align, std::nothrow_t const&)
 
 #define OPERATOR_DELETE_BODY \
   GET_MALLOC_STACK_TRACE; \
-  if (ptr) MsanDeallocate(&stack, ptr)
+  if (ptr) MsanDeallocate(&stack, ptr, false)
 
 INTERCEPTOR_ATTRIBUTE
 void operator delete(void *ptr) NOEXCEPT { OPERATOR_DELETE_BODY; }



More information about the llvm-commits mailing list