[compiler-rt] Reapply "[tsan] Don't use `enum __tsan_memory_order` in tsan interface" (PR #115034)

Vitaly Buka via llvm-commits llvm-commits at lists.llvm.org
Tue Nov 5 10:54:08 PST 2024


https://github.com/vitalybuka updated https://github.com/llvm/llvm-project/pull/115034

From d1e90e3e41ee6e5503835ee7deecb9b899300542 Mon Sep 17 00:00:00 2001
From: Vitaly Buka <vitalybuka at gmail.com>
Date: Tue, 5 Nov 2024 09:44:36 -0800
Subject: [PATCH] Revert "Revert "[tsan] Don't use `enum __tsan_memory_order` in tsan interface…"
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This reverts commit b14c436311e3ff78f61dd59c90486432d13bf38e.
---
 .../include/sanitizer/tsan_interface_atomic.h | 169 ++++++-----
 compiler-rt/lib/tsan/rtl/tsan_interface.h     | 136 ++++-----
 .../lib/tsan/rtl/tsan_interface_atomic.cpp    | 263 +++++++++---------
 3 files changed, 282 insertions(+), 286 deletions(-)
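
For readers skimming the diffstat: the practical effect of the signature change is that the memory-order parameters become plain `int`, so C and C++ callers can pass either the compiler's __ATOMIC_* constants or the __tsan_memory_order enum values (which the header still defines) without a cast. A minimal, hypothetical caller sketch (not part of this patch; the function and variable names below are illustrative):

  // Hypothetical caller, assuming the declarations in
  // sanitizer/tsan_interface_atomic.h as changed by this patch.
  #include <sanitizer/tsan_interface_atomic.h>

  static __tsan_atomic32 g_counter;

  __tsan_atomic32 load_counter(void) {
    // The compiler-provided __ATOMIC_ACQUIRE constant is an int and can be
    // passed directly now that the parameter type is int.
    return __tsan_atomic32_load(&g_counter, __ATOMIC_ACQUIRE);
  }

  void bump_counter(void) {
    // The __tsan_memory_order enum values still convert implicitly to int.
    __tsan_atomic32_fetch_add(&g_counter, 1, __tsan_memory_order_release);
  }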

diff --git a/compiler-rt/include/sanitizer/tsan_interface_atomic.h b/compiler-rt/include/sanitizer/tsan_interface_atomic.h
index de3a1c3936097d..74ed91efade040 100644
--- a/compiler-rt/include/sanitizer/tsan_interface_atomic.h
+++ b/compiler-rt/include/sanitizer/tsan_interface_atomic.h
@@ -43,183 +43,178 @@ typedef enum {
 } __tsan_memory_order;
 
 __tsan_atomic8 SANITIZER_CDECL
-__tsan_atomic8_load(const volatile __tsan_atomic8 *a, __tsan_memory_order mo);
+__tsan_atomic8_load(const volatile __tsan_atomic8 *a, int mo);
 __tsan_atomic16 SANITIZER_CDECL
-__tsan_atomic16_load(const volatile __tsan_atomic16 *a, __tsan_memory_order mo);
+__tsan_atomic16_load(const volatile __tsan_atomic16 *a, int mo);
 __tsan_atomic32 SANITIZER_CDECL
-__tsan_atomic32_load(const volatile __tsan_atomic32 *a, __tsan_memory_order mo);
+__tsan_atomic32_load(const volatile __tsan_atomic32 *a, int mo);
 __tsan_atomic64 SANITIZER_CDECL
-__tsan_atomic64_load(const volatile __tsan_atomic64 *a, __tsan_memory_order mo);
+__tsan_atomic64_load(const volatile __tsan_atomic64 *a, int mo);
 #if __TSAN_HAS_INT128
-__tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_load(
-    const volatile __tsan_atomic128 *a, __tsan_memory_order mo);
+__tsan_atomic128 SANITIZER_CDECL
+__tsan_atomic128_load(const volatile __tsan_atomic128 *a, int mo);
 #endif
 
 void SANITIZER_CDECL __tsan_atomic8_store(volatile __tsan_atomic8 *a,
-                                          __tsan_atomic8 v,
-                                          __tsan_memory_order mo);
+                                          __tsan_atomic8 v, int mo);
 void SANITIZER_CDECL __tsan_atomic16_store(volatile __tsan_atomic16 *a,
-                                           __tsan_atomic16 v,
-                                           __tsan_memory_order mo);
+                                           __tsan_atomic16 v, int mo);
 void SANITIZER_CDECL __tsan_atomic32_store(volatile __tsan_atomic32 *a,
-                                           __tsan_atomic32 v,
-                                           __tsan_memory_order mo);
+                                           __tsan_atomic32 v, int mo);
 void SANITIZER_CDECL __tsan_atomic64_store(volatile __tsan_atomic64 *a,
-                                           __tsan_atomic64 v,
-                                           __tsan_memory_order mo);
+                                           __tsan_atomic64 v, int mo);
 #if __TSAN_HAS_INT128
 void SANITIZER_CDECL __tsan_atomic128_store(volatile __tsan_atomic128 *a,
-                                            __tsan_atomic128 v,
-                                            __tsan_memory_order mo);
+                                            __tsan_atomic128 v, int mo);
 #endif
 
-__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_exchange(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
+__tsan_atomic8 SANITIZER_CDECL
+__tsan_atomic8_exchange(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_exchange(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_exchange(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_exchange(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_exchange(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
 #endif
 
-__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_add(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
+__tsan_atomic8 SANITIZER_CDECL
+__tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_add(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_add(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_add(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_add(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
 #endif
 
-__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_sub(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
+__tsan_atomic8 SANITIZER_CDECL
+__tsan_atomic8_fetch_sub(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_sub(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_sub(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_sub(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_sub(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
 #endif
 
-__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_and(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
+__tsan_atomic8 SANITIZER_CDECL
+__tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_and(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_and(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_and(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_and(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
 #endif
 
-__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_or(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
+__tsan_atomic8 SANITIZER_CDECL
+__tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_or(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_or(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_or(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_or(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
 #endif
 
-__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_xor(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
+__tsan_atomic8 SANITIZER_CDECL
+__tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_xor(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_xor(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_xor(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_xor(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
 #endif
 
-__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_nand(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
+__tsan_atomic8 SANITIZER_CDECL
+__tsan_atomic8_fetch_nand(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_nand(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_nand(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_nand(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_nand(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
 #endif
 
 int SANITIZER_CDECL __tsan_atomic8_compare_exchange_weak(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 *c, __tsan_atomic8 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    volatile __tsan_atomic8 *a, __tsan_atomic8 *c, __tsan_atomic8 v, int mo,
+    int fail_mo);
 int SANITIZER_CDECL __tsan_atomic16_compare_exchange_weak(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 *c, __tsan_atomic16 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 *c, __tsan_atomic16 v, int mo,
+    int fail_mo);
 int SANITIZER_CDECL __tsan_atomic32_compare_exchange_weak(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 *c, __tsan_atomic32 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 *c, __tsan_atomic32 v, int mo,
+    int fail_mo);
 int SANITIZER_CDECL __tsan_atomic64_compare_exchange_weak(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 *c, __tsan_atomic64 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 *c, __tsan_atomic64 v, int mo,
+    int fail_mo);
 #if __TSAN_HAS_INT128
 int SANITIZER_CDECL __tsan_atomic128_compare_exchange_weak(
     volatile __tsan_atomic128 *a, __tsan_atomic128 *c, __tsan_atomic128 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    int mo, int fail_mo);
 #endif
 
 int SANITIZER_CDECL __tsan_atomic8_compare_exchange_strong(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 *c, __tsan_atomic8 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    volatile __tsan_atomic8 *a, __tsan_atomic8 *c, __tsan_atomic8 v, int mo,
+    int fail_mo);
 int SANITIZER_CDECL __tsan_atomic16_compare_exchange_strong(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 *c, __tsan_atomic16 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 *c, __tsan_atomic16 v, int mo,
+    int fail_mo);
 int SANITIZER_CDECL __tsan_atomic32_compare_exchange_strong(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 *c, __tsan_atomic32 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 *c, __tsan_atomic32 v, int mo,
+    int fail_mo);
 int SANITIZER_CDECL __tsan_atomic64_compare_exchange_strong(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 *c, __tsan_atomic64 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 *c, __tsan_atomic64 v, int mo,
+    int fail_mo);
 #if __TSAN_HAS_INT128
 int SANITIZER_CDECL __tsan_atomic128_compare_exchange_strong(
     volatile __tsan_atomic128 *a, __tsan_atomic128 *c, __tsan_atomic128 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    int mo, int fail_mo);
 #endif
 
 __tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_compare_exchange_val(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v, int mo,
+    int fail_mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_compare_exchange_val(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v, int mo,
+    int fail_mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_compare_exchange_val(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v, int mo,
+    int fail_mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_compare_exchange_val(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v, int mo,
+    int fail_mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_compare_exchange_val(
     volatile __tsan_atomic128 *a, __tsan_atomic128 c, __tsan_atomic128 v,
-    __tsan_memory_order mo, __tsan_memory_order fail_mo);
+    int mo, int fail_mo);
 #endif
 
-void SANITIZER_CDECL __tsan_atomic_thread_fence(__tsan_memory_order mo);
-void SANITIZER_CDECL __tsan_atomic_signal_fence(__tsan_memory_order mo);
+void SANITIZER_CDECL __tsan_atomic_thread_fence(int mo);
+void SANITIZER_CDECL __tsan_atomic_signal_fence(int mo);
 
 #ifdef __cplusplus
 } // extern "C"
diff --git a/compiler-rt/lib/tsan/rtl/tsan_interface.h b/compiler-rt/lib/tsan/rtl/tsan_interface.h
index 2b8a13ddb842cc..6c197449904c92 100644
--- a/compiler-rt/lib/tsan/rtl/tsan_interface.h
+++ b/compiler-rt/lib/tsan/rtl/tsan_interface.h
@@ -232,180 +232,180 @@ struct ThreadState;
 
 extern "C" {
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_load(const volatile a8 *a, morder mo);
+a8 __tsan_atomic8_load(const volatile a8 *a, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_load(const volatile a16 *a, morder mo);
+a16 __tsan_atomic16_load(const volatile a16 *a, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_load(const volatile a32 *a, morder mo);
+a32 __tsan_atomic32_load(const volatile a32 *a, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_load(const volatile a64 *a, morder mo);
+a64 __tsan_atomic64_load(const volatile a64 *a, int mo);
 #if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_load(const volatile a128 *a, morder mo);
+a128 __tsan_atomic128_load(const volatile a128 *a, int mo);
 #endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic8_store(volatile a8 *a, a8 v, morder mo);
+void __tsan_atomic8_store(volatile a8 *a, a8 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic16_store(volatile a16 *a, a16 v, morder mo);
+void __tsan_atomic16_store(volatile a16 *a, a16 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic32_store(volatile a32 *a, a32 v, morder mo);
+void __tsan_atomic32_store(volatile a32 *a, a32 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic64_store(volatile a64 *a, a64 v, morder mo);
+void __tsan_atomic64_store(volatile a64 *a, a64 v, int mo);
 #if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic128_store(volatile a128 *a, a128 v, morder mo);
+void __tsan_atomic128_store(volatile a128 *a, a128 v, int mo);
 #endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_exchange(volatile a8 *a, a8 v, morder mo);
+a8 __tsan_atomic8_exchange(volatile a8 *a, a8 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_exchange(volatile a16 *a, a16 v, morder mo);
+a16 __tsan_atomic16_exchange(volatile a16 *a, a16 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_exchange(volatile a32 *a, a32 v, morder mo);
+a32 __tsan_atomic32_exchange(volatile a32 *a, a32 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_exchange(volatile a64 *a, a64 v, morder mo);
+a64 __tsan_atomic64_exchange(volatile a64 *a, a64 v, int mo);
 #if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_exchange(volatile a128 *a, a128 v, morder mo);
+a128 __tsan_atomic128_exchange(volatile a128 *a, a128 v, int mo);
 #endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_fetch_add(volatile a8 *a, a8 v, morder mo);
+a8 __tsan_atomic8_fetch_add(volatile a8 *a, a8 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_fetch_add(volatile a16 *a, a16 v, morder mo);
+a16 __tsan_atomic16_fetch_add(volatile a16 *a, a16 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_fetch_add(volatile a32 *a, a32 v, morder mo);
+a32 __tsan_atomic32_fetch_add(volatile a32 *a, a32 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_fetch_add(volatile a64 *a, a64 v, morder mo);
+a64 __tsan_atomic64_fetch_add(volatile a64 *a, a64 v, int mo);
 #if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_fetch_add(volatile a128 *a, a128 v, morder mo);
+a128 __tsan_atomic128_fetch_add(volatile a128 *a, a128 v, int mo);
 #endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_fetch_sub(volatile a8 *a, a8 v, morder mo);
+a8 __tsan_atomic8_fetch_sub(volatile a8 *a, a8 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_fetch_sub(volatile a16 *a, a16 v, morder mo);
+a16 __tsan_atomic16_fetch_sub(volatile a16 *a, a16 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_fetch_sub(volatile a32 *a, a32 v, morder mo);
+a32 __tsan_atomic32_fetch_sub(volatile a32 *a, a32 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_fetch_sub(volatile a64 *a, a64 v, morder mo);
+a64 __tsan_atomic64_fetch_sub(volatile a64 *a, a64 v, int mo);
 #if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_fetch_sub(volatile a128 *a, a128 v, morder mo);
+a128 __tsan_atomic128_fetch_sub(volatile a128 *a, a128 v, int mo);
 #endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_fetch_and(volatile a8 *a, a8 v, morder mo);
+a8 __tsan_atomic8_fetch_and(volatile a8 *a, a8 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_fetch_and(volatile a16 *a, a16 v, morder mo);
+a16 __tsan_atomic16_fetch_and(volatile a16 *a, a16 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_fetch_and(volatile a32 *a, a32 v, morder mo);
+a32 __tsan_atomic32_fetch_and(volatile a32 *a, a32 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_fetch_and(volatile a64 *a, a64 v, morder mo);
+a64 __tsan_atomic64_fetch_and(volatile a64 *a, a64 v, int mo);
 #if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_fetch_and(volatile a128 *a, a128 v, morder mo);
+a128 __tsan_atomic128_fetch_and(volatile a128 *a, a128 v, int mo);
 #endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_fetch_or(volatile a8 *a, a8 v, morder mo);
+a8 __tsan_atomic8_fetch_or(volatile a8 *a, a8 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_fetch_or(volatile a16 *a, a16 v, morder mo);
+a16 __tsan_atomic16_fetch_or(volatile a16 *a, a16 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_fetch_or(volatile a32 *a, a32 v, morder mo);
+a32 __tsan_atomic32_fetch_or(volatile a32 *a, a32 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_fetch_or(volatile a64 *a, a64 v, morder mo);
+a64 __tsan_atomic64_fetch_or(volatile a64 *a, a64 v, int mo);
 #if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_fetch_or(volatile a128 *a, a128 v, morder mo);
+a128 __tsan_atomic128_fetch_or(volatile a128 *a, a128 v, int mo);
 #endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_fetch_xor(volatile a8 *a, a8 v, morder mo);
+a8 __tsan_atomic8_fetch_xor(volatile a8 *a, a8 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_fetch_xor(volatile a16 *a, a16 v, morder mo);
+a16 __tsan_atomic16_fetch_xor(volatile a16 *a, a16 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_fetch_xor(volatile a32 *a, a32 v, morder mo);
+a32 __tsan_atomic32_fetch_xor(volatile a32 *a, a32 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_fetch_xor(volatile a64 *a, a64 v, morder mo);
+a64 __tsan_atomic64_fetch_xor(volatile a64 *a, a64 v, int mo);
 #if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_fetch_xor(volatile a128 *a, a128 v, morder mo);
+a128 __tsan_atomic128_fetch_xor(volatile a128 *a, a128 v, int mo);
 #endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_fetch_nand(volatile a8 *a, a8 v, morder mo);
+a8 __tsan_atomic8_fetch_nand(volatile a8 *a, a8 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_fetch_nand(volatile a16 *a, a16 v, morder mo);
+a16 __tsan_atomic16_fetch_nand(volatile a16 *a, a16 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_fetch_nand(volatile a32 *a, a32 v, morder mo);
+a32 __tsan_atomic32_fetch_nand(volatile a32 *a, a32 v, int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_fetch_nand(volatile a64 *a, a64 v, morder mo);
+a64 __tsan_atomic64_fetch_nand(volatile a64 *a, a64 v, int mo);
 #if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_fetch_nand(volatile a128 *a, a128 v, morder mo);
+a128 __tsan_atomic128_fetch_nand(volatile a128 *a, a128 v, int mo);
 #endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-int __tsan_atomic8_compare_exchange_strong(volatile a8 *a, a8 *c, a8 v,
-                                           morder mo, morder fmo);
+int __tsan_atomic8_compare_exchange_strong(volatile a8 *a, a8 *c, a8 v, int mo,
+                                           int fmo);
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic16_compare_exchange_strong(volatile a16 *a, a16 *c, a16 v,
-                                            morder mo, morder fmo);
+                                            int mo, int fmo);
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic32_compare_exchange_strong(volatile a32 *a, a32 *c, a32 v,
-                                            morder mo, morder fmo);
+                                            int mo, int fmo);
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic64_compare_exchange_strong(volatile a64 *a, a64 *c, a64 v,
-                                            morder mo, morder fmo);
+                                            int mo, int fmo);
 #if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic128_compare_exchange_strong(volatile a128 *a, a128 *c, a128 v,
-                                             morder mo, morder fmo);
+                                             int mo, int fmo);
 #endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-int __tsan_atomic8_compare_exchange_weak(volatile a8 *a, a8 *c, a8 v, morder mo,
-                                         morder fmo);
+int __tsan_atomic8_compare_exchange_weak(volatile a8 *a, a8 *c, a8 v, int mo,
+                                         int fmo);
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic16_compare_exchange_weak(volatile a16 *a, a16 *c, a16 v,
-                                          morder mo, morder fmo);
+                                          int mo, int fmo);
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic32_compare_exchange_weak(volatile a32 *a, a32 *c, a32 v,
-                                          morder mo, morder fmo);
+                                          int mo, int fmo);
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic64_compare_exchange_weak(volatile a64 *a, a64 *c, a64 v,
-                                          morder mo, morder fmo);
+                                          int mo, int fmo);
 #if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic128_compare_exchange_weak(volatile a128 *a, a128 *c, a128 v,
-                                           morder mo, morder fmo);
+                                           int mo, int fmo);
 #endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_compare_exchange_val(volatile a8 *a, a8 c, a8 v, morder mo,
-                                       morder fmo);
+a8 __tsan_atomic8_compare_exchange_val(volatile a8 *a, a8 c, a8 v, int mo,
+                                       int fmo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_compare_exchange_val(volatile a16 *a, a16 c, a16 v,
-                                         morder mo, morder fmo);
+a16 __tsan_atomic16_compare_exchange_val(volatile a16 *a, a16 c, a16 v, int mo,
+                                         int fmo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_compare_exchange_val(volatile a32 *a, a32 c, a32 v,
-                                         morder mo, morder fmo);
+a32 __tsan_atomic32_compare_exchange_val(volatile a32 *a, a32 c, a32 v, int mo,
+                                         int fmo);
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_compare_exchange_val(volatile a64 *a, a64 c, a64 v,
-                                         morder mo, morder fmo);
+a64 __tsan_atomic64_compare_exchange_val(volatile a64 *a, a64 c, a64 v, int mo,
+                                         int fmo);
 #if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
 a128 __tsan_atomic128_compare_exchange_val(volatile a128 *a, a128 c, a128 v,
-                                           morder mo, morder fmo);
+                                           int mo, int fmo);
 #endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic_thread_fence(morder mo);
+void __tsan_atomic_thread_fence(int mo);
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic_signal_fence(morder mo);
+void __tsan_atomic_signal_fence(int mo);
 
 SANITIZER_INTERFACE_ATTRIBUTE
 void __tsan_go_atomic32_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
diff --git a/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp b/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp
index 67106f59f6e7f5..d12e0f596a7fb4 100644
--- a/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp
+++ b/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp
@@ -501,9 +501,10 @@ struct OpFence {
 // C/C++
 
 static morder convert_morder(morder mo) {
-  if (flags()->force_seq_cst_atomics)
-    return (morder)mo_seq_cst;
+  return flags()->force_seq_cst_atomics ? mo_seq_cst : mo;
+}
 
+static morder to_morder(int mo) {
   // Filter out additional memory order flags:
   // MEMMODEL_SYNC        = 1 << 15
   // __ATOMIC_HLE_ACQUIRE = 1 << 16
@@ -514,7 +515,7 @@ static morder convert_morder(morder mo) {
   // since we use __sync_ atomics for actual atomic operations,
   // we can safely ignore it as well. It also subtly affects semantics,
   // but we don't model the difference.
-  return (morder)(mo & 0x7fff);
+  return static_cast<morder>(mo & 0x7fff);
 }
 
 #  define ATOMIC_IMPL(func, mo, ...)                                  \
@@ -527,349 +528,349 @@ static morder convert_morder(morder mo) {
 
 extern "C" {
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_load(const volatile a8 *a, morder mo) {
-  ATOMIC_IMPL(Load, mo, a);
+a8 __tsan_atomic8_load(const volatile a8 *a, int mo) {
+  ATOMIC_IMPL(Load, to_morder(mo), a);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_load(const volatile a16 *a, morder mo) {
-  ATOMIC_IMPL(Load, mo, a);
+a16 __tsan_atomic16_load(const volatile a16 *a, int mo) {
+  ATOMIC_IMPL(Load, to_morder(mo), a);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_load(const volatile a32 *a, morder mo) {
-  ATOMIC_IMPL(Load, mo, a);
+a32 __tsan_atomic32_load(const volatile a32 *a, int mo) {
+  ATOMIC_IMPL(Load, to_morder(mo), a);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_load(const volatile a64 *a, morder mo) {
-  ATOMIC_IMPL(Load, mo, a);
+a64 __tsan_atomic64_load(const volatile a64 *a, int mo) {
+  ATOMIC_IMPL(Load, to_morder(mo), a);
 }
 
 #  if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_load(const volatile a128 *a, morder mo) {
-  ATOMIC_IMPL(Load, mo, a);
+a128 __tsan_atomic128_load(const volatile a128 *a, int mo) {
+  ATOMIC_IMPL(Load, to_morder(mo), a);
 }
 #  endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic8_store(volatile a8 *a, a8 v, morder mo) {
-  ATOMIC_IMPL(Store, mo, a, v);
+void __tsan_atomic8_store(volatile a8 *a, a8 v, int mo) {
+  ATOMIC_IMPL(Store, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic16_store(volatile a16 *a, a16 v, morder mo) {
-  ATOMIC_IMPL(Store, mo, a, v);
+void __tsan_atomic16_store(volatile a16 *a, a16 v, int mo) {
+  ATOMIC_IMPL(Store, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic32_store(volatile a32 *a, a32 v, morder mo) {
-  ATOMIC_IMPL(Store, mo, a, v);
+void __tsan_atomic32_store(volatile a32 *a, a32 v, int mo) {
+  ATOMIC_IMPL(Store, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic64_store(volatile a64 *a, a64 v, morder mo) {
-  ATOMIC_IMPL(Store, mo, a, v);
+void __tsan_atomic64_store(volatile a64 *a, a64 v, int mo) {
+  ATOMIC_IMPL(Store, to_morder(mo), a, v);
 }
 
 #  if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic128_store(volatile a128 *a, a128 v, morder mo) {
-  ATOMIC_IMPL(Store, mo, a, v);
+void __tsan_atomic128_store(volatile a128 *a, a128 v, int mo) {
+  ATOMIC_IMPL(Store, to_morder(mo), a, v);
 }
 #  endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_exchange(volatile a8 *a, a8 v, morder mo) {
-  ATOMIC_IMPL(Exchange, mo, a, v);
+a8 __tsan_atomic8_exchange(volatile a8 *a, a8 v, int mo) {
+  ATOMIC_IMPL(Exchange, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_exchange(volatile a16 *a, a16 v, morder mo) {
-  ATOMIC_IMPL(Exchange, mo, a, v);
+a16 __tsan_atomic16_exchange(volatile a16 *a, a16 v, int mo) {
+  ATOMIC_IMPL(Exchange, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_exchange(volatile a32 *a, a32 v, morder mo) {
-  ATOMIC_IMPL(Exchange, mo, a, v);
+a32 __tsan_atomic32_exchange(volatile a32 *a, a32 v, int mo) {
+  ATOMIC_IMPL(Exchange, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_exchange(volatile a64 *a, a64 v, morder mo) {
-  ATOMIC_IMPL(Exchange, mo, a, v);
+a64 __tsan_atomic64_exchange(volatile a64 *a, a64 v, int mo) {
+  ATOMIC_IMPL(Exchange, to_morder(mo), a, v);
 }
 
 #  if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_exchange(volatile a128 *a, a128 v, morder mo) {
-  ATOMIC_IMPL(Exchange, mo, a, v);
+a128 __tsan_atomic128_exchange(volatile a128 *a, a128 v, int mo) {
+  ATOMIC_IMPL(Exchange, to_morder(mo), a, v);
 }
 #  endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_fetch_add(volatile a8 *a, a8 v, morder mo) {
-  ATOMIC_IMPL(FetchAdd, mo, a, v);
+a8 __tsan_atomic8_fetch_add(volatile a8 *a, a8 v, int mo) {
+  ATOMIC_IMPL(FetchAdd, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_fetch_add(volatile a16 *a, a16 v, morder mo) {
-  ATOMIC_IMPL(FetchAdd, mo, a, v);
+a16 __tsan_atomic16_fetch_add(volatile a16 *a, a16 v, int mo) {
+  ATOMIC_IMPL(FetchAdd, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_fetch_add(volatile a32 *a, a32 v, morder mo) {
-  ATOMIC_IMPL(FetchAdd, mo, a, v);
+a32 __tsan_atomic32_fetch_add(volatile a32 *a, a32 v, int mo) {
+  ATOMIC_IMPL(FetchAdd, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_fetch_add(volatile a64 *a, a64 v, morder mo) {
-  ATOMIC_IMPL(FetchAdd, mo, a, v);
+a64 __tsan_atomic64_fetch_add(volatile a64 *a, a64 v, int mo) {
+  ATOMIC_IMPL(FetchAdd, to_morder(mo), a, v);
 }
 
 #  if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_fetch_add(volatile a128 *a, a128 v, morder mo) {
-  ATOMIC_IMPL(FetchAdd, mo, a, v);
+a128 __tsan_atomic128_fetch_add(volatile a128 *a, a128 v, int mo) {
+  ATOMIC_IMPL(FetchAdd, to_morder(mo), a, v);
 }
 #  endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_fetch_sub(volatile a8 *a, a8 v, morder mo) {
-  ATOMIC_IMPL(FetchSub, mo, a, v);
+a8 __tsan_atomic8_fetch_sub(volatile a8 *a, a8 v, int mo) {
+  ATOMIC_IMPL(FetchSub, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_fetch_sub(volatile a16 *a, a16 v, morder mo) {
-  ATOMIC_IMPL(FetchSub, mo, a, v);
+a16 __tsan_atomic16_fetch_sub(volatile a16 *a, a16 v, int mo) {
+  ATOMIC_IMPL(FetchSub, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_fetch_sub(volatile a32 *a, a32 v, morder mo) {
-  ATOMIC_IMPL(FetchSub, mo, a, v);
+a32 __tsan_atomic32_fetch_sub(volatile a32 *a, a32 v, int mo) {
+  ATOMIC_IMPL(FetchSub, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_fetch_sub(volatile a64 *a, a64 v, morder mo) {
-  ATOMIC_IMPL(FetchSub, mo, a, v);
+a64 __tsan_atomic64_fetch_sub(volatile a64 *a, a64 v, int mo) {
+  ATOMIC_IMPL(FetchSub, to_morder(mo), a, v);
 }
 
 #  if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_fetch_sub(volatile a128 *a, a128 v, morder mo) {
-  ATOMIC_IMPL(FetchSub, mo, a, v);
+a128 __tsan_atomic128_fetch_sub(volatile a128 *a, a128 v, int mo) {
+  ATOMIC_IMPL(FetchSub, to_morder(mo), a, v);
 }
 #  endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_fetch_and(volatile a8 *a, a8 v, morder mo) {
-  ATOMIC_IMPL(FetchAnd, mo, a, v);
+a8 __tsan_atomic8_fetch_and(volatile a8 *a, a8 v, int mo) {
+  ATOMIC_IMPL(FetchAnd, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_fetch_and(volatile a16 *a, a16 v, morder mo) {
-  ATOMIC_IMPL(FetchAnd, mo, a, v);
+a16 __tsan_atomic16_fetch_and(volatile a16 *a, a16 v, int mo) {
+  ATOMIC_IMPL(FetchAnd, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_fetch_and(volatile a32 *a, a32 v, morder mo) {
-  ATOMIC_IMPL(FetchAnd, mo, a, v);
+a32 __tsan_atomic32_fetch_and(volatile a32 *a, a32 v, int mo) {
+  ATOMIC_IMPL(FetchAnd, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_fetch_and(volatile a64 *a, a64 v, morder mo) {
-  ATOMIC_IMPL(FetchAnd, mo, a, v);
+a64 __tsan_atomic64_fetch_and(volatile a64 *a, a64 v, int mo) {
+  ATOMIC_IMPL(FetchAnd, to_morder(mo), a, v);
 }
 
 #  if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_fetch_and(volatile a128 *a, a128 v, morder mo) {
-  ATOMIC_IMPL(FetchAnd, mo, a, v);
+a128 __tsan_atomic128_fetch_and(volatile a128 *a, a128 v, int mo) {
+  ATOMIC_IMPL(FetchAnd, to_morder(mo), a, v);
 }
 #  endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_fetch_or(volatile a8 *a, a8 v, morder mo) {
-  ATOMIC_IMPL(FetchOr, mo, a, v);
+a8 __tsan_atomic8_fetch_or(volatile a8 *a, a8 v, int mo) {
+  ATOMIC_IMPL(FetchOr, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_fetch_or(volatile a16 *a, a16 v, morder mo) {
-  ATOMIC_IMPL(FetchOr, mo, a, v);
+a16 __tsan_atomic16_fetch_or(volatile a16 *a, a16 v, int mo) {
+  ATOMIC_IMPL(FetchOr, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_fetch_or(volatile a32 *a, a32 v, morder mo) {
-  ATOMIC_IMPL(FetchOr, mo, a, v);
+a32 __tsan_atomic32_fetch_or(volatile a32 *a, a32 v, int mo) {
+  ATOMIC_IMPL(FetchOr, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_fetch_or(volatile a64 *a, a64 v, morder mo) {
-  ATOMIC_IMPL(FetchOr, mo, a, v);
+a64 __tsan_atomic64_fetch_or(volatile a64 *a, a64 v, int mo) {
+  ATOMIC_IMPL(FetchOr, to_morder(mo), a, v);
 }
 
 #  if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_fetch_or(volatile a128 *a, a128 v, morder mo) {
-  ATOMIC_IMPL(FetchOr, mo, a, v);
+a128 __tsan_atomic128_fetch_or(volatile a128 *a, a128 v, int mo) {
+  ATOMIC_IMPL(FetchOr, to_morder(mo), a, v);
 }
 #  endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_fetch_xor(volatile a8 *a, a8 v, morder mo) {
-  ATOMIC_IMPL(FetchXor, mo, a, v);
+a8 __tsan_atomic8_fetch_xor(volatile a8 *a, a8 v, int mo) {
+  ATOMIC_IMPL(FetchXor, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_fetch_xor(volatile a16 *a, a16 v, morder mo) {
-  ATOMIC_IMPL(FetchXor, mo, a, v);
+a16 __tsan_atomic16_fetch_xor(volatile a16 *a, a16 v, int mo) {
+  ATOMIC_IMPL(FetchXor, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_fetch_xor(volatile a32 *a, a32 v, morder mo) {
-  ATOMIC_IMPL(FetchXor, mo, a, v);
+a32 __tsan_atomic32_fetch_xor(volatile a32 *a, a32 v, int mo) {
+  ATOMIC_IMPL(FetchXor, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_fetch_xor(volatile a64 *a, a64 v, morder mo) {
-  ATOMIC_IMPL(FetchXor, mo, a, v);
+a64 __tsan_atomic64_fetch_xor(volatile a64 *a, a64 v, int mo) {
+  ATOMIC_IMPL(FetchXor, to_morder(mo), a, v);
 }
 
 #  if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_fetch_xor(volatile a128 *a, a128 v, morder mo) {
-  ATOMIC_IMPL(FetchXor, mo, a, v);
+a128 __tsan_atomic128_fetch_xor(volatile a128 *a, a128 v, int mo) {
+  ATOMIC_IMPL(FetchXor, to_morder(mo), a, v);
 }
 #  endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_fetch_nand(volatile a8 *a, a8 v, morder mo) {
-  ATOMIC_IMPL(FetchNand, mo, a, v);
+a8 __tsan_atomic8_fetch_nand(volatile a8 *a, a8 v, int mo) {
+  ATOMIC_IMPL(FetchNand, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_fetch_nand(volatile a16 *a, a16 v, morder mo) {
-  ATOMIC_IMPL(FetchNand, mo, a, v);
+a16 __tsan_atomic16_fetch_nand(volatile a16 *a, a16 v, int mo) {
+  ATOMIC_IMPL(FetchNand, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_fetch_nand(volatile a32 *a, a32 v, morder mo) {
-  ATOMIC_IMPL(FetchNand, mo, a, v);
+a32 __tsan_atomic32_fetch_nand(volatile a32 *a, a32 v, int mo) {
+  ATOMIC_IMPL(FetchNand, to_morder(mo), a, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_fetch_nand(volatile a64 *a, a64 v, morder mo) {
-  ATOMIC_IMPL(FetchNand, mo, a, v);
+a64 __tsan_atomic64_fetch_nand(volatile a64 *a, a64 v, int mo) {
+  ATOMIC_IMPL(FetchNand, to_morder(mo), a, v);
 }
 
 #  if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
-a128 __tsan_atomic128_fetch_nand(volatile a128 *a, a128 v, morder mo) {
-  ATOMIC_IMPL(FetchNand, mo, a, v);
+a128 __tsan_atomic128_fetch_nand(volatile a128 *a, a128 v, int mo) {
+  ATOMIC_IMPL(FetchNand, to_morder(mo), a, v);
 }
 #  endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-int __tsan_atomic8_compare_exchange_strong(volatile a8 *a, a8 *c, a8 v,
-                                           morder mo, morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+int __tsan_atomic8_compare_exchange_strong(volatile a8 *a, a8 *c, a8 v, int mo,
+                                           int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic16_compare_exchange_strong(volatile a16 *a, a16 *c, a16 v,
-                                            morder mo, morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+                                            int mo, int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic32_compare_exchange_strong(volatile a32 *a, a32 *c, a32 v,
-                                            morder mo, morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+                                            int mo, int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic64_compare_exchange_strong(volatile a64 *a, a64 *c, a64 v,
-                                            morder mo, morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+                                            int mo, int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 
 #  if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic128_compare_exchange_strong(volatile a128 *a, a128 *c, a128 v,
-                                             morder mo, morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+                                             int mo, int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 #  endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-int __tsan_atomic8_compare_exchange_weak(volatile a8 *a, a8 *c, a8 v, morder mo,
-                                         morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+int __tsan_atomic8_compare_exchange_weak(volatile a8 *a, a8 *c, a8 v, int mo,
+                                         int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic16_compare_exchange_weak(volatile a16 *a, a16 *c, a16 v,
-                                          morder mo, morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+                                          int mo, int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic32_compare_exchange_weak(volatile a32 *a, a32 *c, a32 v,
-                                          morder mo, morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+                                          int mo, int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic64_compare_exchange_weak(volatile a64 *a, a64 *c, a64 v,
-                                          morder mo, morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+                                          int mo, int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 
 #  if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
 int __tsan_atomic128_compare_exchange_weak(volatile a128 *a, a128 *c, a128 v,
-                                           morder mo, morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+                                           int mo, int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 #  endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a8 __tsan_atomic8_compare_exchange_val(volatile a8 *a, a8 c, a8 v, morder mo,
-                                       morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+a8 __tsan_atomic8_compare_exchange_val(volatile a8 *a, a8 c, a8 v, int mo,
+                                       int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a16 __tsan_atomic16_compare_exchange_val(volatile a16 *a, a16 c, a16 v,
-                                         morder mo, morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+a16 __tsan_atomic16_compare_exchange_val(volatile a16 *a, a16 c, a16 v, int mo,
+                                         int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a32 __tsan_atomic32_compare_exchange_val(volatile a32 *a, a32 c, a32 v,
-                                         morder mo, morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+a32 __tsan_atomic32_compare_exchange_val(volatile a32 *a, a32 c, a32 v, int mo,
+                                         int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-a64 __tsan_atomic64_compare_exchange_val(volatile a64 *a, a64 c, a64 v,
-                                         morder mo, morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+a64 __tsan_atomic64_compare_exchange_val(volatile a64 *a, a64 c, a64 v, int mo,
+                                         int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 
 #  if __TSAN_HAS_INT128
 SANITIZER_INTERFACE_ATTRIBUTE
 a128 __tsan_atomic128_compare_exchange_val(volatile a128 *a, a128 c, a128 v,
-                                           morder mo, morder fmo) {
-  ATOMIC_IMPL(CAS, mo, fmo, a, c, v);
+                                           int mo, int fmo) {
+  ATOMIC_IMPL(CAS, to_morder(mo), to_morder(fmo), a, c, v);
 }
 #  endif
 
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic_thread_fence(morder mo) { ATOMIC_IMPL(Fence, mo); }
+void __tsan_atomic_thread_fence(int mo) { ATOMIC_IMPL(Fence, to_morder(mo)); }
 
 SANITIZER_INTERFACE_ATTRIBUTE
-void __tsan_atomic_signal_fence(morder mo) {}
+void __tsan_atomic_signal_fence(int mo) {}
 }  // extern "C"
 
 #else  // #if !SANITIZER_GO
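
A side note on the to_morder() helper added above: it masks the incoming raw int down to the base C/C++ memory order, dropping flag bits such as MEMMODEL_SYNC (1 << 15) and the __ATOMIC_HLE_ACQUIRE / __ATOMIC_HLE_RELEASE bits (1 << 16, 1 << 17) that GCC can OR into the order argument. A standalone sketch of that masking (illustrative constant names, not the runtime's code verbatim):

  // Illustrative constants mirroring the comment in to_morder().
  enum {
    kMemmodelSync = 1 << 15,  // MEMMODEL_SYNC
    kHleAcquire   = 1 << 16,  // __ATOMIC_HLE_ACQUIRE
    kHleRelease   = 1 << 17,  // __ATOMIC_HLE_RELEASE
  };

  // Keep only the base memory order, as to_morder() does with (mo & 0x7fff).
  static int strip_order_flags(int mo) { return mo & 0x7fff; }

  // e.g. strip_order_flags(__ATOMIC_ACQUIRE | kHleAcquire) == __ATOMIC_ACQUIRE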
