[Openmp-commits] [openmp] 44a11c3 - [OpenMP] Use explicit type casting in kmp_atomic.cpp

Hansang Bae via Openmp-commits <openmp-commits at lists.llvm.org>
Tue Nov 17 12:31:33 PST 2020


Author: Hansang Bae
Date: 2020-11-17T14:31:13-06:00
New Revision: 44a11c342caa70efe9f9d07db3e66dd48f701aca

URL: https://github.com/llvm/llvm-project/commit/44a11c342caa70efe9f9d07db3e66dd48f701aca
DIFF: https://github.com/llvm/llvm-project/commit/44a11c342caa70efe9f9d07db3e66dd48f701aca.diff

LOG: [OpenMP] Use explicit type casting in kmp_atomic.cpp

Differential Revision: https://reviews.llvm.org/D91105
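
Background on why the casts are needed: in C++, arithmetic on operand
types narrower than int is performed after integer promotion, so an
expression such as (*lhs) + rhs on 1- or 2-byte operands yields an int.
The macro-generated update (*lhs) = (*lhs) OP (rhs) therefore narrows
implicitly on assignment, which strict conversion diagnostics flag. The
patch makes the narrowing explicit with a (TYPE) cast. A minimal
standalone sketch of the pattern (hypothetical function, not code from
the runtime):

    #include <cstdint>

    // Hypothetical illustration of the promotion/narrowing issue the
    // patch addresses; int8_t stands in for the runtime's 1-byte types.
    void add_update(int8_t *lhs, int8_t rhs) {
      // *lhs + rhs is computed as int after integer promotion, so the
      // plain assignment *lhs = *lhs + rhs narrows implicitly and can
      // warn under strict conversion diagnostics.
      *lhs = (int8_t)(*lhs + rhs); // explicit cast, as in the patch
    }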

Added: 
    

Modified: 
    openmp/runtime/src/kmp_atomic.cpp
    openmp/runtime/src/kmp_stats.h

Removed: 
    


################################################################################
diff --git a/openmp/runtime/src/kmp_atomic.cpp b/openmp/runtime/src/kmp_atomic.cpp
index 148b2da3f524..264ade6afbe9 100644
--- a/openmp/runtime/src/kmp_atomic.cpp
+++ b/openmp/runtime/src/kmp_atomic.cpp
@@ -606,17 +606,17 @@ kmp_atomic_lock_t __kmp_atomic_lock_32c;
 
 #if (KMP_ARCH_X86) && KMP_HAVE_QUAD
 
-static inline void operator+=(Quad_a4_t &lhs, Quad_a4_t &rhs) {
-  lhs.q += rhs.q;
+static inline Quad_a4_t operator+(Quad_a4_t &lhs, Quad_a4_t &rhs) {
+  return lhs.q + rhs.q;
 }
-static inline void operator-=(Quad_a4_t &lhs, Quad_a4_t &rhs) {
-  lhs.q -= rhs.q;
+static inline Quad_a4_t operator-(Quad_a4_t &lhs, Quad_a4_t &rhs) {
+  return lhs.q - rhs.q;
 }
-static inline void operator*=(Quad_a4_t &lhs, Quad_a4_t &rhs) {
-  lhs.q *= rhs.q;
+static inline Quad_a4_t operator*(Quad_a4_t &lhs, Quad_a4_t &rhs) {
+  return lhs.q * rhs.q;
 }
-static inline void operator/=(Quad_a4_t &lhs, Quad_a4_t &rhs) {
-  lhs.q /= rhs.q;
+static inline Quad_a4_t operator/(Quad_a4_t &lhs, Quad_a4_t &rhs) {
+  return lhs.q / rhs.q;
 }
 static inline bool operator<(Quad_a4_t &lhs, Quad_a4_t &rhs) {
   return lhs.q < rhs.q;
@@ -625,17 +625,17 @@ static inline bool operator>(Quad_a4_t &lhs, Quad_a4_t &rhs) {
   return lhs.q > rhs.q;
 }
 
-static inline void operator+=(Quad_a16_t &lhs, Quad_a16_t &rhs) {
-  lhs.q += rhs.q;
+static inline Quad_a16_t operator+(Quad_a16_t &lhs, Quad_a16_t &rhs) {
+  return lhs.q + rhs.q;
 }
-static inline void operator-=(Quad_a16_t &lhs, Quad_a16_t &rhs) {
-  lhs.q -= rhs.q;
+static inline Quad_a16_t operator-(Quad_a16_t &lhs, Quad_a16_t &rhs) {
+  return lhs.q - rhs.q;
 }
-static inline void operator*=(Quad_a16_t &lhs, Quad_a16_t &rhs) {
-  lhs.q *= rhs.q;
+static inline Quad_a16_t operator*(Quad_a16_t &lhs, Quad_a16_t &rhs) {
+  return lhs.q * rhs.q;
 }
-static inline void operator/=(Quad_a16_t &lhs, Quad_a16_t &rhs) {
-  lhs.q /= rhs.q;
+static inline Quad_a16_t operator/(Quad_a16_t &lhs, Quad_a16_t &rhs) {
+  return lhs.q / rhs.q;
 }
 static inline bool operator<(Quad_a16_t &lhs, Quad_a16_t &rhs) {
   return lhs.q < rhs.q;
@@ -644,34 +644,38 @@ static inline bool operator>(Quad_a16_t &lhs, Quad_a16_t &rhs) {
   return lhs.q > rhs.q;
 }
 
-static inline void operator+=(kmp_cmplx128_a4_t &lhs, kmp_cmplx128_a4_t &rhs) {
-  lhs.q += rhs.q;
+static inline kmp_cmplx128_a4_t operator+(kmp_cmplx128_a4_t &lhs,
+                                          kmp_cmplx128_a4_t &rhs) {
+  return lhs.q + rhs.q;
 }
-static inline void operator-=(kmp_cmplx128_a4_t &lhs, kmp_cmplx128_a4_t &rhs) {
-  lhs.q -= rhs.q;
+static inline kmp_cmplx128_a4_t operator-(kmp_cmplx128_a4_t &lhs,
+                                          kmp_cmplx128_a4_t &rhs) {
+  return lhs.q - rhs.q;
 }
-static inline void operator*=(kmp_cmplx128_a4_t &lhs, kmp_cmplx128_a4_t &rhs) {
-  lhs.q *= rhs.q;
+static inline kmp_cmplx128_a4_t operator*(kmp_cmplx128_a4_t &lhs,
+                                          kmp_cmplx128_a4_t &rhs) {
+  return lhs.q * rhs.q;
 }
-static inline void operator/=(kmp_cmplx128_a4_t &lhs, kmp_cmplx128_a4_t &rhs) {
-  lhs.q /= rhs.q;
+static inline kmp_cmplx128_a4_t operator/(kmp_cmplx128_a4_t &lhs,
+                                          kmp_cmplx128_a4_t &rhs) {
+  return lhs.q / rhs.q;
 }
 
-static inline void operator+=(kmp_cmplx128_a16_t &lhs,
-                              kmp_cmplx128_a16_t &rhs) {
-  lhs.q += rhs.q;
+static inline kmp_cmplx128_a16_t operator+(kmp_cmplx128_a16_t &lhs,
+                                           kmp_cmplx128_a16_t &rhs) {
+  return lhs.q + rhs.q;
 }
-static inline void operator-=(kmp_cmplx128_a16_t &lhs,
-                              kmp_cmplx128_a16_t &rhs) {
-  lhs.q -= rhs.q;
+static inline kmp_cmplx128_a16_t operator-(kmp_cmplx128_a16_t &lhs,
+                                           kmp_cmplx128_a16_t &rhs) {
+  return lhs.q - rhs.q;
 }
-static inline void operator*=(kmp_cmplx128_a16_t &lhs,
-                              kmp_cmplx128_a16_t &rhs) {
-  lhs.q *= rhs.q;
+static inline kmp_cmplx128_a16_t operator*(kmp_cmplx128_a16_t &lhs,
+                                           kmp_cmplx128_a16_t &rhs) {
+  return lhs.q * rhs.q;
 }
-static inline void operator/=(kmp_cmplx128_a16_t &lhs,
-                              kmp_cmplx128_a16_t &rhs) {
-  lhs.q /= rhs.q;
+static inline kmp_cmplx128_a16_t operator/(kmp_cmplx128_a16_t &lhs,
+                                           kmp_cmplx128_a16_t &rhs) {
+  return lhs.q / rhs.q;
 }
 
 #endif // (KMP_ARCH_X86) && KMP_HAVE_QUAD
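
The operator overloads above change shape to support the new update
pattern: the compound-assignment forms (+=, -=, *=, /=) returned void,
but (*lhs) = (TYPE)((*lhs) OP (rhs)) needs lhs OP rhs to be a value
that can convert back to the aligned wrapper type. A rough sketch of
the shape, assuming a converting constructor like the runtime's quad
wrappers provide (simplified hypothetical type):

    // Simplified hypothetical wrapper mirroring the Quad_a4_t pattern;
    // long double stands in for _Quad.
    struct Quad {
      long double q;
      Quad(long double v) : q(v) {} // converting constructor assumed
    };

    static inline Quad operator+(Quad &lhs, Quad &rhs) {
      return lhs.q + rhs.q; // returns by value via the converting ctor
    }

    void add_update(Quad *lhs, Quad rhs) {
      *lhs = (Quad)(*lhs + rhs); // the (TYPE)((lhs) OP (rhs)) pattern
    }
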
@@ -726,6 +730,11 @@ static inline void operator/=(kmp_cmplx128_a16_t &lhs,
                                                                                \
   __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
 
+#define OP_UPDATE_CRITICAL(TYPE, OP, LCK_ID)                                   \
+  __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
+  (*lhs) = (TYPE)((*lhs)OP(rhs));                                              \
+  __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
+
 // ------------------------------------------------------------------------
 // For GNU compatibility, we may need to use a critical section,
 // even though it is not required by the ISA.
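
To see what the new macro produces, a rough manual expansion of
OP_UPDATE_CRITICAL(kmp_int8, +, 1i) — assuming the runtime's usual
ATOMIC_LOCK1i mapping to a lock variable, with lhs, rhs, and gtid
supplied by the enclosing generated function — would read approximately:

    // Approximate expansion (sketch); ATOMIC_LOCK1i is assumed to name
    // the 1-byte integer lock, as in the surrounding lock definitions.
    __kmp_acquire_atomic_lock(&ATOMIC_LOCK1i, gtid);
    (*lhs) = (kmp_int8)((*lhs) + (rhs)); // cast added by the new macro
    __kmp_release_atomic_lock(&ATOMIC_LOCK1i, gtid);
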
@@ -755,8 +764,16 @@ static inline void operator/=(kmp_cmplx128_a16_t &lhs,
     OP_CRITICAL(OP, 0);                                                        \
     return;                                                                    \
   }
+
+#define OP_UPDATE_GOMP_CRITICAL(TYPE, OP, FLAG)                                \
+  if ((FLAG) && (__kmp_atomic_mode == 2)) {                                    \
+    KMP_CHECK_GTID;                                                            \
+    OP_UPDATE_CRITICAL(TYPE, OP, 0);                                           \
+    return;                                                                    \
+  }
 #else
 #define OP_GOMP_CRITICAL(OP, FLAG)
+#define OP_UPDATE_GOMP_CRITICAL(TYPE, OP, FLAG)
 #endif /* KMP_GOMP_COMPAT */
 
 #if KMP_MIC
@@ -774,14 +791,14 @@ static inline void operator/=(kmp_cmplx128_a16_t &lhs,
   {                                                                            \
     TYPE old_value, new_value;                                                 \
     old_value = *(TYPE volatile *)lhs;                                         \
-    new_value = old_value OP rhs;                                              \
+    new_value = (TYPE)(old_value OP rhs);                                      \
     while (!KMP_COMPARE_AND_STORE_ACQ##BITS(                                   \
         (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value,     \
         *VOLATILE_CAST(kmp_int##BITS *) & new_value)) {                        \
       KMP_DO_PAUSE;                                                            \
                                                                                \
       old_value = *(TYPE volatile *)lhs;                                       \
-      new_value = old_value OP rhs;                                            \
+      new_value = (TYPE)(old_value OP rhs);                                    \
     }                                                                          \
   }
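
The lock-free path above retries a compare-and-swap until the update
lands, and the cast now sits on each computed new value. A rough
portable rendering of the same retry loop — using std::atomic in place
of the runtime's KMP_COMPARE_AND_STORE primitives — looks like:

    #include <atomic>
    #include <cstdint>

    // Sketch of the OP_CMPXCHG retry loop for a 1-byte += update, using
    // std::atomic rather than the runtime's own CAS macros.
    void add_cmpxchg(std::atomic<int8_t> *lhs, int8_t rhs) {
      int8_t old_value = lhs->load();
      int8_t new_value = (int8_t)(old_value + rhs); // cast, as in the patch
      while (!lhs->compare_exchange_weak(old_value, new_value)) {
        // a failed CAS refreshes old_value; recompute and retry
        new_value = (int8_t)(old_value + rhs);
      }
    }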
 
@@ -802,14 +819,14 @@ static inline void operator/=(kmp_cmplx128_a16_t &lhs,
     old_value.vvv = (kmp_int##BITS *)&old_value.cmp;                           \
     new_value.vvv = (kmp_int##BITS *)&new_value.cmp;                           \
     *old_value.vvv = *(volatile kmp_int##BITS *)lhs;                           \
-    new_value.cmp = old_value.cmp OP rhs;                                      \
+    new_value.cmp = (TYPE)(old_value.cmp OP rhs);                              \
     while (!KMP_COMPARE_AND_STORE_ACQ##BITS(                                   \
         (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) old_value.vvv,   \
         *VOLATILE_CAST(kmp_int##BITS *) new_value.vvv)) {                      \
       KMP_DO_PAUSE;                                                            \
                                                                                \
       *old_value.vvv = *(volatile kmp_int##BITS *)lhs;                         \
-      new_value.cmp = old_value.cmp OP rhs;                                    \
+      new_value.cmp = (TYPE)(old_value.cmp OP rhs);                            \
     }                                                                          \
   }
 // end of the first part of the workaround for C78287
@@ -822,7 +839,7 @@ static inline void operator/=(kmp_cmplx128_a16_t &lhs,
 #define ATOMIC_FIXED_ADD(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK,         \
                          GOMP_FLAG)                                            \
   ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void)                                     \
-  OP_GOMP_CRITICAL(OP## =, GOMP_FLAG)                                          \
+  OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG)                                 \
   /* OP used as a sign for subtraction: (lhs-rhs) --> (lhs+-rhs) */            \
   KMP_TEST_THEN_ADD##BITS(lhs, OP rhs);                                        \
   }
@@ -830,7 +847,7 @@ static inline void operator/=(kmp_cmplx128_a16_t &lhs,
 #define ATOMIC_CMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK,           \
                        GOMP_FLAG)                                              \
   ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void)                                     \
-  OP_GOMP_CRITICAL(OP## =, GOMP_FLAG)                                          \
+  OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG)                                 \
   OP_CMPXCHG(TYPE, BITS, OP)                                                   \
   }
 #if USE_CMPXCHG_FIX
@@ -839,7 +856,7 @@ static inline void operator/=(kmp_cmplx128_a16_t &lhs,
 #define ATOMIC_CMPXCHG_WORKAROUND(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID,      \
                                   MASK, GOMP_FLAG)                             \
   ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void)                                     \
-  OP_GOMP_CRITICAL(OP## =, GOMP_FLAG)                                          \
+  OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG)                                 \
   OP_CMPXCHG_WORKAROUND(TYPE, BITS, OP)                                        \
   }
 // end of the second part of the workaround for C78287
@@ -851,25 +868,27 @@ static inline void operator/=(kmp_cmplx128_a16_t &lhs,
 #define ATOMIC_FIXED_ADD(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK,         \
                          GOMP_FLAG)                                            \
   ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void)                                     \
-  OP_GOMP_CRITICAL(OP## =, GOMP_FLAG)                                          \
+  OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG)                                 \
   if (!((kmp_uintptr_t)lhs & 0x##MASK)) {                                      \
     /* OP used as a sign for subtraction: (lhs-rhs) --> (lhs+-rhs) */          \
     KMP_TEST_THEN_ADD##BITS(lhs, OP rhs);                                      \
   } else {                                                                     \
     KMP_CHECK_GTID;                                                            \
-    OP_CRITICAL(OP## =, LCK_ID) /* unaligned address - use critical */         \
+    OP_UPDATE_CRITICAL(TYPE, OP,                                               \
+                       LCK_ID) /* unaligned address - use critical */          \
   }                                                                            \
   }
 // -------------------------------------------------------------------------
 #define ATOMIC_CMPXCHG(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK,           \
                        GOMP_FLAG)                                              \
   ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void)                                     \
-  OP_GOMP_CRITICAL(OP## =, GOMP_FLAG)                                          \
+  OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG)                                 \
   if (!((kmp_uintptr_t)lhs & 0x##MASK)) {                                      \
     OP_CMPXCHG(TYPE, BITS, OP) /* aligned address */                           \
   } else {                                                                     \
     KMP_CHECK_GTID;                                                            \
-    OP_CRITICAL(OP## =, LCK_ID) /* unaligned address - use critical */         \
+    OP_UPDATE_CRITICAL(TYPE, OP,                                               \
+                       LCK_ID) /* unaligned address - use critical */          \
   }                                                                            \
   }
 #if USE_CMPXCHG_FIX
@@ -878,12 +897,13 @@ static inline void operator/=(kmp_cmplx128_a16_t &lhs,
 #define ATOMIC_CMPXCHG_WORKAROUND(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID,      \
                                   MASK, GOMP_FLAG)                             \
   ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void)                                     \
-  OP_GOMP_CRITICAL(OP## =, GOMP_FLAG)                                          \
+  OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG)                                 \
   if (!((kmp_uintptr_t)lhs & 0x##MASK)) {                                      \
     OP_CMPXCHG(TYPE, BITS, OP) /* aligned address */                           \
   } else {                                                                     \
     KMP_CHECK_GTID;                                                            \
-    OP_CRITICAL(OP## =, LCK_ID) /* unaligned address - use critical */         \
+    OP_UPDATE_CRITICAL(TYPE, OP,                                               \
+                       LCK_ID) /* unaligned address - use critical */          \
   }                                                                            \
   }
 // end of the second part of the workaround for C78287
@@ -1200,8 +1220,8 @@ MIN_MAX_CRITICAL(float16, min_a16, Quad_a16_t, >, 16r,
 // OP ignored for critical sections, ^=~ used instead
 #define ATOMIC_CRIT_EQV(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG)           \
   ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void)                                     \
-  OP_GOMP_CRITICAL(^= ~, GOMP_FLAG) /* send assignment */                      \
-  OP_CRITICAL(^= ~, LCK_ID) /* send assignment and complement */               \
+  OP_GOMP_CRITICAL(^= (TYPE) ~, GOMP_FLAG) /* send assignment */               \
+  OP_CRITICAL(^= (TYPE) ~, LCK_ID) /* send assignment and complement */        \
   }
 
 // ------------------------------------------------------------------------
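
The .eqv. operators compose two operations, so the cast lands inside
the pasted text: with OP now ^= (TYPE) ~, the critical-section body
becomes (*lhs) ^= (TYPE) ~(rhs), narrowing the complemented (and
therefore promoted) right-hand side back to the operand type. A minimal
hypothetical illustration:

    #include <cstdint>

    // Hypothetical 1-byte version of the .eqv. update: ~rhs is an int
    // after promotion, so the cast narrows it back before the ^=.
    void eqv_update(int8_t *lhs, int8_t rhs) {
      (*lhs) ^= (int8_t)~(rhs);
    }
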
@@ -1211,7 +1231,7 @@ MIN_MAX_CRITICAL(float16, min_a16, Quad_a16_t, >, 16r,
 #define ATOMIC_CMPX_EQV(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK,          \
                         GOMP_FLAG)                                             \
   ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void)                                     \
-  OP_GOMP_CRITICAL(^= ~, GOMP_FLAG) /* send assignment */                      \
+  OP_GOMP_CRITICAL(^= (TYPE) ~, GOMP_FLAG) /* send assignment */               \
   OP_CMPXCHG(TYPE, BITS, OP)                                                   \
   }
 // ------------------------------------------------------------------------
@@ -1221,12 +1241,12 @@ MIN_MAX_CRITICAL(float16, min_a16, Quad_a16_t, >, 16r,
 #define ATOMIC_CMPX_EQV(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, MASK,          \
                         GOMP_FLAG)                                             \
   ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void)                                     \
-  OP_GOMP_CRITICAL(^= ~, GOMP_FLAG)                                            \
+  OP_GOMP_CRITICAL(^= (TYPE) ~, GOMP_FLAG)                                     \
   if (!((kmp_uintptr_t)lhs & 0x##MASK)) {                                      \
     OP_CMPXCHG(TYPE, BITS, OP) /* aligned address */                           \
   } else {                                                                     \
     KMP_CHECK_GTID;                                                            \
-    OP_CRITICAL(^= ~, LCK_ID) /* unaligned address - use critical */           \
+    OP_CRITICAL(^= (TYPE) ~, LCK_ID) /* unaligned address - use critical */    \
   }                                                                            \
   }
 #endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
@@ -1256,8 +1276,8 @@ ATOMIC_CMPX_EQV(fixed8, eqv, kmp_int64, 64, ^~, 8i, 7,
 //     LCK_ID  - lock identifier, used to possibly distinguish lock variable
 #define ATOMIC_CRITICAL(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG)           \
   ATOMIC_BEGIN(TYPE_ID, OP_ID, TYPE, void)                                     \
-  OP_GOMP_CRITICAL(OP## =, GOMP_FLAG) /* send assignment */                    \
-  OP_CRITICAL(OP## =, LCK_ID) /* send assignment */                            \
+  OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) /* send assignment */           \
+  OP_UPDATE_CRITICAL(TYPE, OP, LCK_ID) /* send assignment */                   \
   }
 
 /* ------------------------------------------------------------------------- */
@@ -1354,22 +1374,23 @@ ATOMIC_CRITICAL(cmplx16, div_a16, kmp_cmplx128_a16_t, /, 32c,
 //     LCK_ID - lock identifier
 // Note: don't check gtid as it should always be valid
 // 1, 2-byte - expect valid parameter, other - check before this macro
-#define OP_CRITICAL_REV(OP, LCK_ID)                                            \
+#define OP_CRITICAL_REV(TYPE, OP, LCK_ID)                                      \
   __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
                                                                                \
-  (*lhs) = (rhs)OP(*lhs);                                                      \
+  (*lhs) = (TYPE)((rhs)OP(*lhs));                                              \
                                                                                \
   __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);
 
 #ifdef KMP_GOMP_COMPAT
-#define OP_GOMP_CRITICAL_REV(OP, FLAG)                                         \
+#define OP_GOMP_CRITICAL_REV(TYPE, OP, FLAG)                                   \
   if ((FLAG) && (__kmp_atomic_mode == 2)) {                                    \
     KMP_CHECK_GTID;                                                            \
-    OP_CRITICAL_REV(OP, 0);                                                    \
+    OP_CRITICAL_REV(TYPE, OP, 0);                                              \
     return;                                                                    \
   }
+
 #else
-#define OP_GOMP_CRITICAL_REV(OP, FLAG)
+#define OP_GOMP_CRITICAL_REV(TYPE, OP, FLAG)
 #endif /* KMP_GOMP_COMPAT */
 
 // Beginning of a definition (provides name, parameters, gebug trace)
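
The _REV variants cover the reversed-operand forms OpenMP allows for
non-commutative operators (x = expr - x, x = expr / x), and the same
cast is applied to the reversed expression,
(*lhs) = (TYPE)((rhs) OP (*lhs)). A hypothetical 1-byte rendering:

    #include <cstdint>

    // Hypothetical reversed subtraction with the narrowing made
    // explicit, mirroring OP_CRITICAL_REV's (*lhs) = (TYPE)((rhs) OP (*lhs)).
    void sub_rev(int8_t *lhs, int8_t rhs) {
      *lhs = (int8_t)(rhs - *lhs);
    }
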
@@ -1396,7 +1417,7 @@ ATOMIC_CRITICAL(cmplx16, div_a16, kmp_cmplx128_a16_t, /, 32c,
     TYPE old_value, new_value;                                                 \
     temp_val = *lhs;                                                           \
     old_value = temp_val;                                                      \
-    new_value = rhs OP old_value;                                              \
+    new_value = (TYPE)(rhs OP old_value);                                      \
     while (!KMP_COMPARE_AND_STORE_ACQ##BITS(                                   \
         (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value,     \
         *VOLATILE_CAST(kmp_int##BITS *) & new_value)) {                        \
@@ -1404,14 +1425,14 @@ ATOMIC_CRITICAL(cmplx16, div_a16, kmp_cmplx128_a16_t, /, 32c,
                                                                                \
       temp_val = *lhs;                                                         \
       old_value = temp_val;                                                    \
-      new_value = rhs OP old_value;                                            \
+      new_value = (TYPE)(rhs OP old_value);                                    \
     }                                                                          \
   }
 
 // -------------------------------------------------------------------------
 #define ATOMIC_CMPXCHG_REV(TYPE_ID, OP_ID, TYPE, BITS, OP, LCK_ID, GOMP_FLAG)  \
   ATOMIC_BEGIN_REV(TYPE_ID, OP_ID, TYPE, void)                                 \
-  OP_GOMP_CRITICAL_REV(OP, GOMP_FLAG)                                          \
+  OP_GOMP_CRITICAL_REV(TYPE, OP, GOMP_FLAG)                                    \
   OP_CMPXCHG_REV(TYPE, BITS, OP)                                               \
   }
 
@@ -1500,8 +1521,8 @@ ATOMIC_CMPXCHG_REV(float8, sub, kmp_real64, 64, -, 8r,
 //     LCK_ID  - lock identifier, used to possibly distinguish lock variable
 #define ATOMIC_CRITICAL_REV(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG)       \
   ATOMIC_BEGIN_REV(TYPE_ID, OP_ID, TYPE, void)                                 \
-  OP_GOMP_CRITICAL_REV(OP, GOMP_FLAG)                                          \
-  OP_CRITICAL_REV(OP, LCK_ID)                                                  \
+  OP_GOMP_CRITICAL_REV(TYPE, OP, GOMP_FLAG)                                    \
+  OP_CRITICAL_REV(TYPE, OP, LCK_ID)                                            \
   }
 
 /* ------------------------------------------------------------------------- */
@@ -1576,8 +1597,8 @@ ATOMIC_CRITICAL_REV(cmplx16, div_a16, kmp_cmplx128_a16_t, /, 32c,
 #define ATOMIC_CRITICAL_FP(TYPE_ID, TYPE, OP_ID, OP, RTYPE_ID, RTYPE, LCK_ID,  \
                            GOMP_FLAG)                                          \
   ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE)                      \
-  OP_GOMP_CRITICAL(OP## =, GOMP_FLAG) /* send assignment */                    \
-  OP_CRITICAL(OP## =, LCK_ID) /* send assignment */                            \
+  OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG) /* send assignment */           \
+  OP_UPDATE_CRITICAL(TYPE, OP, LCK_ID) /* send assignment */                   \
   }
 
 // -------------------------------------------------------------------------
@@ -1587,7 +1608,7 @@ ATOMIC_CRITICAL_REV(cmplx16, div_a16, kmp_cmplx128_a16_t, /, 32c,
 #define ATOMIC_CMPXCHG_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE,    \
                            LCK_ID, MASK, GOMP_FLAG)                            \
   ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE)                      \
-  OP_GOMP_CRITICAL(OP## =, GOMP_FLAG)                                          \
+  OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG)                                 \
   OP_CMPXCHG(TYPE, BITS, OP)                                                   \
   }
 // -------------------------------------------------------------------------
@@ -1597,12 +1618,13 @@ ATOMIC_CRITICAL_REV(cmplx16, div_a16, kmp_cmplx128_a16_t, /, 32c,
 #define ATOMIC_CMPXCHG_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE,    \
                            LCK_ID, MASK, GOMP_FLAG)                            \
   ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE)                      \
-  OP_GOMP_CRITICAL(OP## =, GOMP_FLAG)                                          \
+  OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG)                                 \
   if (!((kmp_uintptr_t)lhs & 0x##MASK)) {                                      \
     OP_CMPXCHG(TYPE, BITS, OP) /* aligned address */                           \
   } else {                                                                     \
     KMP_CHECK_GTID;                                                            \
-    OP_CRITICAL(OP## =, LCK_ID) /* unaligned address - use critical */         \
+    OP_UPDATE_CRITICAL(TYPE, OP,                                               \
+                       LCK_ID) /* unaligned address - use critical */          \
   }                                                                            \
   }
 #endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
@@ -1613,14 +1635,14 @@ ATOMIC_CRITICAL_REV(cmplx16, div_a16, kmp_cmplx128_a16_t, /, 32c,
 #define ATOMIC_CMPXCHG_REV_MIX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID,       \
                                RTYPE, LCK_ID, MASK, GOMP_FLAG)                 \
   ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE)                      \
-  OP_GOMP_CRITICAL_REV(OP, GOMP_FLAG)                                          \
+  OP_GOMP_CRITICAL_REV(TYPE, OP, GOMP_FLAG)                                    \
   OP_CMPXCHG_REV(TYPE, BITS, OP)                                               \
   }
 #define ATOMIC_CRITICAL_REV_FP(TYPE_ID, TYPE, OP_ID, OP, RTYPE_ID, RTYPE,      \
                                LCK_ID, GOMP_FLAG)                              \
   ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE)                      \
-  OP_GOMP_CRITICAL_REV(OP, GOMP_FLAG)                                          \
-  OP_CRITICAL_REV(OP, LCK_ID)                                                  \
+  OP_GOMP_CRITICAL_REV(TYPE, OP, GOMP_FLAG)                                    \
+  OP_CRITICAL_REV(TYPE, OP, LCK_ID)                                            \
   }
 #endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
 
@@ -1812,7 +1834,7 @@ ATOMIC_CRITICAL_REV_FP(float10, long double, div_rev, /, fp, _Quad, 10r,
 #define ATOMIC_CMPXCHG_CMPLX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE,  \
                              LCK_ID, MASK, GOMP_FLAG)                          \
   ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE)                      \
-  OP_GOMP_CRITICAL(OP## =, GOMP_FLAG)                                          \
+  OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG)                                 \
   OP_CMPXCHG_WORKAROUND(TYPE, BITS, OP)                                        \
   }
 // end of the second part of the workaround for C78287
@@ -1820,7 +1842,7 @@ ATOMIC_CRITICAL_REV_FP(float10, long double, div_rev, /, fp, _Quad, 10r,
 #define ATOMIC_CMPXCHG_CMPLX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE,  \
                              LCK_ID, MASK, GOMP_FLAG)                          \
   ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE)                      \
-  OP_GOMP_CRITICAL(OP## =, GOMP_FLAG)                                          \
+  OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG)                                 \
   OP_CMPXCHG(TYPE, BITS, OP)                                                   \
   }
 #endif // USE_CMPXCHG_FIX
@@ -1830,12 +1852,13 @@ ATOMIC_CRITICAL_REV_FP(float10, long double, div_rev, /, fp, _Quad, 10r,
 #define ATOMIC_CMPXCHG_CMPLX(TYPE_ID, TYPE, OP_ID, BITS, OP, RTYPE_ID, RTYPE,  \
                              LCK_ID, MASK, GOMP_FLAG)                          \
   ATOMIC_BEGIN_MIX(TYPE_ID, TYPE, OP_ID, RTYPE_ID, RTYPE)                      \
-  OP_GOMP_CRITICAL(OP## =, GOMP_FLAG)                                          \
+  OP_UPDATE_GOMP_CRITICAL(TYPE, OP, GOMP_FLAG)                                 \
   if (!((kmp_uintptr_t)lhs & 0x##MASK)) {                                      \
     OP_CMPXCHG(TYPE, BITS, OP) /* aligned address */                           \
   } else {                                                                     \
     KMP_CHECK_GTID;                                                            \
-    OP_CRITICAL(OP## =, LCK_ID) /* unaligned address - use critical */         \
+    OP_UPDATE_CRITICAL(TYPE, OP,                                               \
+                       LCK_ID) /* unaligned address - use critical */          \
   }                                                                            \
   }
 #endif /* KMP_ARCH_X86 || KMP_ARCH_X86_64 */
@@ -2172,15 +2195,29 @@ ATOMIC_CRITICAL_WR(cmplx16, a16_wr, kmp_cmplx128_a16_t, =, 32c,
   __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
   return new_value;
 
+#define OP_UPDATE_CRITICAL_CPT(TYPE, OP, LCK_ID)                               \
+  __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
+                                                                               \
+  if (flag) {                                                                  \
+    (*lhs) = (TYPE)((*lhs)OP rhs);                                             \
+    new_value = (*lhs);                                                        \
+  } else {                                                                     \
+    new_value = (*lhs);                                                        \
+    (*lhs) = (TYPE)((*lhs)OP rhs);                                             \
+  }                                                                            \
+                                                                               \
+  __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
+  return new_value;
+
 // ------------------------------------------------------------------------
 #ifdef KMP_GOMP_COMPAT
-#define OP_GOMP_CRITICAL_CPT(OP, FLAG)                                         \
+#define OP_GOMP_CRITICAL_CPT(TYPE, OP, FLAG)                                   \
   if ((FLAG) && (__kmp_atomic_mode == 2)) {                                    \
     KMP_CHECK_GTID;                                                            \
-    OP_CRITICAL_CPT(OP## =, 0);                                                \
+    OP_UPDATE_CRITICAL_CPT(TYPE, OP, 0);                                       \
   }
 #else
-#define OP_GOMP_CRITICAL_CPT(OP, FLAG)
+#define OP_GOMP_CRITICAL_CPT(TYPE, OP, FLAG)
 #endif /* KMP_GOMP_COMPAT */
 
 // ------------------------------------------------------------------------
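
The _CPT ("capture") macros return a value as well as updating, and the
flag parameter selects whether the caller gets the post-update or
pre-update value, matching the two capture forms of OpenMP atomics.
Stripped of locking, the new OP_UPDATE_CRITICAL_CPT body behaves like
this hypothetical function:

    #include <cstdint>

    // Sketch of the capture semantics (locking omitted): flag chooses
    // between returning the updated value and the value seen before the
    // update, with the cast applied to the update in both branches.
    int8_t add_cpt(int8_t *lhs, int8_t rhs, int flag) {
      int8_t new_value;
      if (flag) {
        *lhs = (int8_t)(*lhs + rhs); // update first ...
        new_value = *lhs;            // ... capture the new value
      } else {
        new_value = *lhs;            // capture the old value ...
        *lhs = (int8_t)(*lhs + rhs); // ... then update
      }
      return new_value;
    }
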
@@ -2196,7 +2233,7 @@ ATOMIC_CRITICAL_WR(cmplx16, a16_wr, kmp_cmplx128_a16_t, =, 32c,
     TYPE old_value, new_value;                                                 \
     temp_val = *lhs;                                                           \
     old_value = temp_val;                                                      \
-    new_value = old_value OP rhs;                                              \
+    new_value = (TYPE)(old_value OP rhs);                                      \
     while (!KMP_COMPARE_AND_STORE_ACQ##BITS(                                   \
         (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value,     \
         *VOLATILE_CAST(kmp_int##BITS *) & new_value)) {                        \
@@ -2204,7 +2241,7 @@ ATOMIC_CRITICAL_WR(cmplx16, a16_wr, kmp_cmplx128_a16_t, =, 32c,
                                                                                \
       temp_val = *lhs;                                                         \
       old_value = temp_val;                                                    \
-      new_value = old_value OP rhs;                                            \
+      new_value = (TYPE)(old_value OP rhs);                                    \
     }                                                                          \
     if (flag) {                                                                \
       return new_value;                                                        \
@@ -2216,7 +2253,7 @@ ATOMIC_CRITICAL_WR(cmplx16, a16_wr, kmp_cmplx128_a16_t, =, 32c,
 #define ATOMIC_CMPXCHG_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG)          \
   ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE)                                 \
   TYPE new_value;                                                              \
-  OP_GOMP_CRITICAL_CPT(OP, GOMP_FLAG)                                          \
+  OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG)                                    \
   OP_CMPXCHG_CPT(TYPE, BITS, OP)                                               \
   }
 
@@ -2224,7 +2261,7 @@ ATOMIC_CRITICAL_WR(cmplx16, a16_wr, kmp_cmplx128_a16_t, =, 32c,
 #define ATOMIC_FIXED_ADD_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG)        \
   ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE)                                 \
   TYPE old_value, new_value;                                                   \
-  OP_GOMP_CRITICAL_CPT(OP, GOMP_FLAG)                                          \
+  OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG)                                    \
   /* OP used as a sign for subtraction: (lhs-rhs) --> (lhs+-rhs) */            \
   old_value = KMP_TEST_THEN_ADD##BITS(lhs, OP rhs);                            \
   if (flag) {                                                                  \
@@ -2375,7 +2412,7 @@ ATOMIC_CMPXCHG_CPT(float8, mul_cpt, kmp_real64, 64, *,
                                RTYPE, LCK_ID, MASK, GOMP_FLAG)                 \
   ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE)                  \
   TYPE new_value;                                                              \
-  OP_GOMP_CRITICAL_CPT(OP, GOMP_FLAG)                                          \
+  OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG)                                    \
   OP_CMPXCHG_CPT(TYPE, BITS, OP)                                               \
   }
 
@@ -2384,8 +2421,8 @@ ATOMIC_CMPXCHG_CPT(float8, mul_cpt, kmp_real64, 64, *,
                                 LCK_ID, GOMP_FLAG)                             \
   ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE)                  \
   TYPE new_value;                                                              \
-  OP_GOMP_CRITICAL_CPT(OP, GOMP_FLAG) /* send assignment */                    \
-  OP_CRITICAL_CPT(OP## =, LCK_ID) /* send assignment */                        \
+  OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG) /* send assignment */              \
+  OP_UPDATE_CRITICAL_CPT(TYPE, OP, LCK_ID) /* send assignment */               \
   }
 
 ATOMIC_CMPXCHG_CPT_MIX(fixed1, char, add_cpt, 8, +, fp, _Quad, 1i, 0,
@@ -2673,7 +2710,7 @@ MIN_MAX_CRITICAL_CPT(float16, min_a16_cpt, Quad_a16_t, >, 16r,
 #define ATOMIC_CMPX_EQV_CPT(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG)         \
   ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE)                                 \
   TYPE new_value;                                                              \
-  OP_GOMP_CRITICAL_EQV_CPT(^= ~, GOMP_FLAG) /* send assignment */              \
+  OP_GOMP_CRITICAL_EQV_CPT(^= (TYPE) ~, GOMP_FLAG) /* send assignment */       \
   OP_CMPXCHG_CPT(TYPE, BITS, OP)                                               \
   }
 
@@ -2705,8 +2742,8 @@ ATOMIC_CMPX_EQV_CPT(fixed8, eqv_cpt, kmp_int64, 64, ^~,
 #define ATOMIC_CRITICAL_CPT(TYPE_ID, OP_ID, TYPE, OP, LCK_ID, GOMP_FLAG)       \
   ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE)                                 \
   TYPE new_value;                                                              \
-  OP_GOMP_CRITICAL_CPT(OP, GOMP_FLAG) /* send assignment */                    \
-  OP_CRITICAL_CPT(OP## =, LCK_ID) /* send assignment */                        \
+  OP_GOMP_CRITICAL_CPT(TYPE, OP, GOMP_FLAG) /* send assignment */              \
+  OP_UPDATE_CRITICAL_CPT(TYPE, OP, LCK_ID) /* send assignment */               \
   }
 
 // ------------------------------------------------------------------------
@@ -2843,29 +2880,29 @@ ATOMIC_CRITICAL_CPT(cmplx16, div_a16_cpt, kmp_cmplx128_a16_t, /, 32c,
 //     LCK_ID - lock identifier
 // Note: don't check gtid as it should always be valid
 // 1, 2-byte - expect valid parameter, other - check before this macro
-#define OP_CRITICAL_CPT_REV(OP, LCK_ID)                                        \
+#define OP_CRITICAL_CPT_REV(TYPE, OP, LCK_ID)                                  \
   __kmp_acquire_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
                                                                                \
   if (flag) {                                                                  \
     /*temp_val = (*lhs);*/                                                     \
-    (*lhs) = (rhs)OP(*lhs);                                                    \
+    (*lhs) = (TYPE)((rhs)OP(*lhs));                                            \
     new_value = (*lhs);                                                        \
   } else {                                                                     \
     new_value = (*lhs);                                                        \
-    (*lhs) = (rhs)OP(*lhs);                                                    \
+    (*lhs) = (TYPE)((rhs)OP(*lhs));                                            \
   }                                                                            \
   __kmp_release_atomic_lock(&ATOMIC_LOCK##LCK_ID, gtid);                       \
   return new_value;
 
 // ------------------------------------------------------------------------
 #ifdef KMP_GOMP_COMPAT
-#define OP_GOMP_CRITICAL_CPT_REV(OP, FLAG)                                     \
+#define OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, FLAG)                               \
   if ((FLAG) && (__kmp_atomic_mode == 2)) {                                    \
     KMP_CHECK_GTID;                                                            \
-    OP_CRITICAL_CPT_REV(OP, 0);                                                \
+    OP_CRITICAL_CPT_REV(TYPE, OP, 0);                                          \
   }
 #else
-#define OP_GOMP_CRITICAL_CPT_REV(OP, FLAG)
+#define OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, FLAG)
 #endif /* KMP_GOMP_COMPAT */
 
 // ------------------------------------------------------------------------
@@ -2881,7 +2918,7 @@ ATOMIC_CRITICAL_CPT(cmplx16, div_a16_cpt, kmp_cmplx128_a16_t, /, 32c,
     TYPE old_value, new_value;                                                 \
     temp_val = *lhs;                                                           \
     old_value = temp_val;                                                      \
-    new_value = rhs OP old_value;                                              \
+    new_value = (TYPE)(rhs OP old_value);                                      \
     while (!KMP_COMPARE_AND_STORE_ACQ##BITS(                                   \
         (kmp_int##BITS *)lhs, *VOLATILE_CAST(kmp_int##BITS *) & old_value,     \
         *VOLATILE_CAST(kmp_int##BITS *) & new_value)) {                        \
@@ -2889,7 +2926,7 @@ ATOMIC_CRITICAL_CPT(cmplx16, div_a16_cpt, kmp_cmplx128_a16_t, /, 32c,
                                                                                \
       temp_val = *lhs;                                                         \
       old_value = temp_val;                                                    \
-      new_value = rhs OP old_value;                                            \
+      new_value = (TYPE)(rhs OP old_value);                                    \
     }                                                                          \
     if (flag) {                                                                \
       return new_value;                                                        \
@@ -2901,7 +2938,7 @@ ATOMIC_CRITICAL_CPT(cmplx16, div_a16_cpt, kmp_cmplx128_a16_t, /, 32c,
 #define ATOMIC_CMPXCHG_CPT_REV(TYPE_ID, OP_ID, TYPE, BITS, OP, GOMP_FLAG)      \
   ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE)                                 \
   TYPE new_value;                                                              \
-  OP_GOMP_CRITICAL_CPT_REV(OP, GOMP_FLAG)                                      \
+  OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, GOMP_FLAG)                                \
   OP_CMPXCHG_CPT_REV(TYPE, BITS, OP)                                           \
   }
 
@@ -2973,8 +3010,8 @@ ATOMIC_CMPXCHG_CPT_REV(float8, sub_cpt_rev, kmp_real64, 64, -,
   ATOMIC_BEGIN_CPT(TYPE_ID, OP_ID, TYPE, TYPE)                                 \
   TYPE new_value;                                                              \
   /*printf("__kmp_atomic_mode = %d\n", __kmp_atomic_mode);*/                   \
-  OP_GOMP_CRITICAL_CPT_REV(OP, GOMP_FLAG)                                      \
-  OP_CRITICAL_CPT_REV(OP, LCK_ID)                                              \
+  OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, GOMP_FLAG)                                \
+  OP_CRITICAL_CPT_REV(TYPE, OP, LCK_ID)                                        \
   }
 
 /* ------------------------------------------------------------------------- */
@@ -3077,7 +3114,7 @@ ATOMIC_CRITICAL_CPT_REV(cmplx16, div_a16_cpt_rev, kmp_cmplx128_a16_t, /, 32c,
                                    RTYPE, LCK_ID, MASK, GOMP_FLAG)             \
   ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE)                  \
   TYPE new_value;                                                              \
-  OP_GOMP_CRITICAL_CPT_REV(OP, GOMP_FLAG)                                      \
+  OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, GOMP_FLAG)                                \
   OP_CMPXCHG_CPT_REV(TYPE, BITS, OP)                                           \
   }
 
@@ -3086,8 +3123,8 @@ ATOMIC_CRITICAL_CPT_REV(cmplx16, div_a16_cpt_rev, kmp_cmplx128_a16_t, /, 32c,
                                     LCK_ID, GOMP_FLAG)                         \
   ATOMIC_BEGIN_CPT_MIX(TYPE_ID, OP_ID, TYPE, RTYPE_ID, RTYPE)                  \
   TYPE new_value;                                                              \
-  OP_GOMP_CRITICAL_CPT_REV(OP, GOMP_FLAG) /* send assignment */                \
-  OP_CRITICAL_CPT_REV(OP, LCK_ID) /* send assignment */                        \
+  OP_GOMP_CRITICAL_CPT_REV(TYPE, OP, GOMP_FLAG) /* send assignment */          \
+  OP_CRITICAL_CPT_REV(TYPE, OP, LCK_ID) /* send assignment */                  \
   }
 
 ATOMIC_CMPXCHG_CPT_REV_MIX(fixed1, char, sub_cpt_rev, 8, -, fp, _Quad, 1i, 0,

diff --git a/openmp/runtime/src/kmp_stats.h b/openmp/runtime/src/kmp_stats.h
index a36528f3fca3..c281f64eb3ee 100644
--- a/openmp/runtime/src/kmp_stats.h
+++ b/openmp/runtime/src/kmp_stats.h
@@ -422,7 +422,7 @@ class statistic {
   void setOffset(double d) { offset = d; }
 
   void reset() {
-    minVal = std::numeric_limits<double>::max();
+    minVal = (std::numeric_limits<double>::max)();
     maxVal = -minVal;
     meanVal = 0.0;
     m2 = 0.0;
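
The kmp_stats.h hunk fixes a different problem: on Windows, <windows.h>
defines function-like min/max macros unless NOMINMAX is set, and they
can intercept std::numeric_limits<double>::max(). Parenthesizing the
name, (std::numeric_limits<double>::max)(), prevents expansion because
a function-like macro only expands when its name is immediately
followed by '('. A minimal illustration with a stand-in macro:

    #include <limits>

    // Stand-in for the Windows max macro; hypothetical, for illustration.
    #define max(a, b) (((a) > (b)) ? (a) : (b))

    double smallest_start() {
      // Written as std::numeric_limits<double>::max(), the call would be
      // parsed as an invocation of the max macro (and fail, since the
      // macro takes two arguments). The parentheses around the name keep
      // 'max' from being followed directly by '(', so the real member
      // function is called.
      return (std::numeric_limits<double>::max)();
    }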
