[Openmp-commits] [openmp] r231774 - Adding some 8-bit atomic operations for future use
Andrey Churbanov
Andrey.Churbanov at intel.com
Tue Mar 10 02:03:43 PDT 2015
Author: achurbanov
Date: Tue Mar 10 04:03:42 2015
New Revision: 231774
URL: http://llvm.org/viewvc/llvm-project?rev=231774&view=rev
Log:
Adding some 8-bit atomic operations for future use
Modified:
openmp/trunk/runtime/src/kmp_os.h
openmp/trunk/runtime/src/z_Linux_util.c
openmp/trunk/runtime/src/z_Windows_NT-586_util.c
Modified: openmp/trunk/runtime/src/kmp_os.h
URL: http://llvm.org/viewvc/llvm-project/openmp/trunk/runtime/src/kmp_os.h?rev=231774&r1=231773&r2=231774&view=diff
==============================================================================
--- openmp/trunk/runtime/src/kmp_os.h (original)
+++ openmp/trunk/runtime/src/kmp_os.h Tue Mar 10 04:03:42 2015
@@ -458,6 +458,9 @@ enum kmp_mem_fence_type {
# define KMP_TEST_THEN_DEC_ACQ32(p) InterlockedExchangeAdd( (volatile long *)(p), -1 )
# define KMP_TEST_THEN_ADD32(p, v) InterlockedExchangeAdd( (volatile long *)(p), (v) )
+extern kmp_int8 __kmp_test_then_add8( volatile kmp_int8 *p, kmp_int8 v );
+extern kmp_int8 __kmp_test_then_or8( volatile kmp_int8 *p, kmp_int8 v );
+extern kmp_int8 __kmp_test_then_and8( volatile kmp_int8 *p, kmp_int8 v );
# define KMP_COMPARE_AND_STORE_RET32(p, cv, sv) InterlockedCompareExchange( (volatile long *)(p),(long)(sv),(long)(cv) )
# define KMP_XCHG_FIXED32(p, v) InterlockedExchange( (volatile long *)(p), (long)(v) )
@@ -494,8 +497,11 @@ extern kmp_int32 __kmp_xchg_fixed32( vol
extern kmp_int64 __kmp_xchg_fixed64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_real32 __kmp_xchg_real32( volatile kmp_real32 *p, kmp_real32 v );
extern kmp_real64 __kmp_xchg_real64( volatile kmp_real64 *p, kmp_real64 v );
+# define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8( (p), (v) )
//# define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32( (p), 1 )
+# define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8( (p), (v) )
+# define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8( (p), (v) )
//# define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32( (p), 1 )
# define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64( (p), 1LL )
@@ -544,9 +550,12 @@ extern kmp_real64 __kmp_xchg_real64( vol
#elif (KMP_ASM_INTRINS && KMP_OS_UNIX) || !(KMP_ARCH_X86 || KMP_ARCH_X86_64)
+# define KMP_TEST_THEN_ADD8(p, v) __sync_fetch_and_add( (kmp_int8 *)(p), (v) )
/* cast p to correct type so that proper intrinsic will be used */
# define KMP_TEST_THEN_INC32(p) __sync_fetch_and_add( (kmp_int32 *)(p), 1 )
+# define KMP_TEST_THEN_OR8(p, v) __sync_fetch_and_or( (kmp_int8 *)(p), (v) )
+# define KMP_TEST_THEN_AND8(p, v) __sync_fetch_and_and( (kmp_int8 *)(p), (v) )
# define KMP_TEST_THEN_INC_ACQ32(p) __sync_fetch_and_add( (kmp_int32 *)(p), 1 )
# define KMP_TEST_THEN_INC64(p) __sync_fetch_and_add( (kmp_int64 *)(p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p) __sync_fetch_and_add( (kmp_int64 *)(p), 1LL )
@@ -586,6 +595,9 @@ extern kmp_real64 __kmp_xchg_real64( vol
#define KMP_XCHG_FIXED32(p, v) __sync_lock_test_and_set( (volatile kmp_uint32 *)(p), (kmp_uint32)(v) )
#define KMP_XCHG_FIXED64(p, v) __sync_lock_test_and_set( (volatile kmp_uint64 *)(p), (kmp_uint64)(v) )
+extern kmp_int8 __kmp_test_then_add8( volatile kmp_int8 *p, kmp_int8 v );
+extern kmp_int8 __kmp_test_then_or8( volatile kmp_int8 *p, kmp_int8 v );
+extern kmp_int8 __kmp_test_then_and8( volatile kmp_int8 *p, kmp_int8 v );
inline kmp_real32 KMP_XCHG_REAL32( volatile kmp_real32 *p, kmp_real32 v)
{
kmp_int32 tmp = __sync_lock_test_and_set( (kmp_int32*)p, *(kmp_int32*)&v);
@@ -621,9 +633,12 @@ extern kmp_int16 __kmp_xchg_fixed16( vol
extern kmp_int32 __kmp_xchg_fixed32( volatile kmp_int32 *p, kmp_int32 v );
extern kmp_int64 __kmp_xchg_fixed64( volatile kmp_int64 *p, kmp_int64 v );
extern kmp_real32 __kmp_xchg_real32( volatile kmp_real32 *p, kmp_real32 v );
+# define KMP_TEST_THEN_ADD8(p, v) __kmp_test_then_add8( (p), (v) )
extern kmp_real64 __kmp_xchg_real64( volatile kmp_real64 *p, kmp_real64 v );
# define KMP_TEST_THEN_INC32(p) __kmp_test_then_add32( (p), 1 )
+# define KMP_TEST_THEN_OR8(p, v) __kmp_test_then_or8( (p), (v) )
+# define KMP_TEST_THEN_AND8(p, v) __kmp_test_then_and8( (p), (v) )
# define KMP_TEST_THEN_INC_ACQ32(p) __kmp_test_then_add32( (p), 1 )
# define KMP_TEST_THEN_INC64(p) __kmp_test_then_add64( (p), 1LL )
# define KMP_TEST_THEN_INC_ACQ64(p) __kmp_test_then_add64( (p), 1LL )
@@ -713,6 +728,8 @@ extern kmp_real64 __kmp_xchg_real64( vol
# define KMP_LD_ACQ64(A) ( *(A) )
#endif
+#define TCR_1(a) (a)
+#define TCW_1(a,b) (a) = (b)
/* ------------------------------------------------------------------------ */
//
// FIXME - maybe this should this be
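[Editor's aside, not part of the commit] On the __sync-builtin path above, the new KMP_TEST_THEN_ADD8/OR8/AND8 macros expand directly to GCC/Clang atomics that return the byte's value from before the operation. A minimal standalone sketch of that behaviour, with kmp_int8 re-declared locally so the example compiles on its own:

/* Illustrative only: what the __sync path of KMP_TEST_THEN_OR8/AND8/ADD8
 * boils down to.  kmp_int8 here is a local stand-in for the runtime typedef. */
#include <stdio.h>

typedef signed char kmp_int8;

int main(void)
{
    volatile kmp_int8 flags = 0x01;

    kmp_int8 before_or  = __sync_fetch_and_or ((kmp_int8 *)&flags, 0x04);
    kmp_int8 before_and = __sync_fetch_and_and((kmp_int8 *)&flags, 0x05);
    kmp_int8 before_add = __sync_fetch_and_add((kmp_int8 *)&flags, 1);

    /* Each call returns the old value: prints 1 5 5 final=6 */
    printf("%d %d %d final=%d\n", before_or, before_and, before_add, flags);
    return 0;
}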
Modified: openmp/trunk/runtime/src/z_Linux_util.c
URL: http://llvm.org/viewvc/llvm-project/openmp/trunk/runtime/src/z_Linux_util.c?rev=231774&r1=231773&r2=231774&view=diff
==============================================================================
--- openmp/trunk/runtime/src/z_Linux_util.c (original)
+++ openmp/trunk/runtime/src/z_Linux_util.c Tue Mar 10 04:03:42 2015
@@ -437,6 +437,40 @@ __kmp_change_thread_affinity_mask( int g
if ( old_mask != NULL ) {
status = __kmp_get_system_affinity( old_mask, TRUE );
int error = errno;
+kmp_int8
+__kmp_test_then_or8( volatile kmp_int8 *p, kmp_int8 d )
+{
+ kmp_int8 old_value, new_value;
+
+ old_value = TCR_1( *p );
+ new_value = old_value | d;
+
+ while ( ! KMP_COMPARE_AND_STORE_REL8 ( p, old_value, new_value ) )
+ {
+ KMP_CPU_PAUSE();
+ old_value = TCR_1( *p );
+ new_value = old_value | d;
+ }
+ return old_value;
+}
+
+kmp_int8
+__kmp_test_then_and8( volatile kmp_int8 *p, kmp_int8 d )
+{
+ kmp_int8 old_value, new_value;
+
+ old_value = TCR_1( *p );
+ new_value = old_value & d;
+
+ while ( ! KMP_COMPARE_AND_STORE_REL8 ( p, old_value, new_value ) )
+ {
+ KMP_CPU_PAUSE();
+ old_value = TCR_1( *p );
+ new_value = old_value & d;
+ }
+ return old_value;
+}
+
if ( status != 0 ) {
__kmp_msg(
kmp_ms_fatal,
@@ -472,6 +506,23 @@ __kmp_change_thread_affinity_mask( int g
#if KMP_OS_LINUX && (KMP_ARCH_X86 || KMP_ARCH_X86_64 || KMP_ARCH_ARM || KMP_ARCH_AARCH64) && !KMP_OS_CNK
+kmp_int8
+__kmp_test_then_add8( volatile kmp_int8 *p, kmp_int8 d )
+{
+ kmp_int8 old_value, new_value;
+
+ old_value = TCR_1( *p );
+ new_value = old_value + d;
+
+ while ( ! KMP_COMPARE_AND_STORE_REL8 ( p, old_value, new_value ) )
+ {
+ KMP_CPU_PAUSE();
+ old_value = TCR_1( *p );
+ new_value = old_value + d;
+ }
+ return old_value;
+}
+
int
__kmp_futex_determine_capable()
{
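[Editor's aside, not part of the commit] The Linux helpers above are classic read / compute / compare-and-store retry loops. The standalone sketch below reproduces the same pattern with __sync_bool_compare_and_swap standing in for KMP_COMPARE_AND_STORE_REL8 and a plain volatile read standing in for TCR_1; KMP_CPU_PAUSE is omitted, so this is an illustration of the loop, not the runtime code.

/* Standalone sketch of the compare-and-store retry loop used by
 * __kmp_test_then_or8 above; runtime macros replaced by plain-compiler
 * equivalents so the example compiles on its own. */
#include <stdio.h>

typedef signed char kmp_int8;

static kmp_int8 test_then_or8(volatile kmp_int8 *p, kmp_int8 d)
{
    kmp_int8 old_value = *p;            /* read the current byte       */
    kmp_int8 new_value = old_value | d; /* compute the desired value   */

    /* Retry until the byte still equals old_value at the swap. */
    while (!__sync_bool_compare_and_swap((kmp_int8 *)p, old_value, new_value)) {
        old_value = *p;
        new_value = old_value | d;
    }
    return old_value;                   /* value held before the OR    */
}

int main(void)
{
    volatile kmp_int8 flags = 0x02;
    kmp_int8 before = test_then_or8(&flags, 0x01);
    printf("before=%d after=%d\n", before, flags);  /* before=2 after=3 */
    return 0;
}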
Modified: openmp/trunk/runtime/src/z_Windows_NT-586_util.c
URL: http://llvm.org/viewvc/llvm-project/openmp/trunk/runtime/src/z_Windows_NT-586_util.c?rev=231774&r1=231773&r2=231774&view=diff
==============================================================================
--- openmp/trunk/runtime/src/z_Windows_NT-586_util.c (original)
+++ openmp/trunk/runtime/src/z_Windows_NT-586_util.c Tue Mar 10 04:03:42 2015
@@ -20,6 +20,40 @@
* use compare_and_store for these routines
*/
+kmp_int8
+__kmp_test_then_or8( volatile kmp_int8 *p, kmp_int8 d )
+{
+ kmp_int8 old_value, new_value;
+
+ old_value = TCR_1( *p );
+ new_value = old_value | d;
+
+ while ( ! __kmp_compare_and_store8 ( p, old_value, new_value ) )
+ {
+ KMP_CPU_PAUSE();
+ old_value = TCR_1( *p );
+ new_value = old_value | d;
+ }
+ return old_value;
+}
+
+kmp_int8
+__kmp_test_then_and8( volatile kmp_int8 *p, kmp_int8 d )
+{
+ kmp_int8 old_value, new_value;
+
+ old_value = TCR_1( *p );
+ new_value = old_value & d;
+
+ while ( ! __kmp_compare_and_store8 ( p, old_value, new_value ) )
+ {
+ KMP_CPU_PAUSE();
+ old_value = TCR_1( *p );
+ new_value = old_value & d;
+ }
+ return old_value;
+}
+
kmp_int32
__kmp_test_then_or32( volatile kmp_int32 *p, kmp_int32 d )
{
@@ -34,7 +68,6 @@ __kmp_test_then_or32( volatile kmp_int32
old_value = TCR_4( *p );
new_value = old_value | d;
}
-
return old_value;
}
@@ -55,6 +88,22 @@ __kmp_test_then_and32( volatile kmp_int3
return old_value;
}
+kmp_int8
+__kmp_test_then_add8( volatile kmp_int8 *p, kmp_int8 d )
+{
+ kmp_int8 old_value, new_value;
+
+ old_value = TCR_1( *p );
+ new_value = old_value + d;
+ while ( ! __kmp_compare_and_store8 ( p, old_value, new_value ) )
+ {
+ KMP_CPU_PAUSE();
+ old_value = TCR_1( *p );
+ new_value = old_value + d;
+ }
+ return old_value;
+}
+
#if KMP_ARCH_X86
kmp_int64
__kmp_test_then_add64( volatile kmp_int64 *p, kmp_int64 d )
@@ -69,7 +118,6 @@ __kmp_test_then_add64( volatile kmp_int6
old_value = TCR_8( *p );
new_value = old_value + d;
}
-
return old_value;
}
#endif /* KMP_ARCH_X86 */
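[Editor's aside, for comparison only] The hand-rolled loops above provide fetch-or / fetch-and / fetch-add semantics on a single byte; C11's <stdatomic.h> expresses the same idea directly. This is not how the runtime is built, just the standard-library counterpart of the operation.

/* C11 counterpart of the 8-bit fetch-then-op helpers, for comparison only. */
#include <stdatomic.h>
#include <stdio.h>

int main(void)
{
    _Atomic signed char flags = 0x01;

    signed char before_or  = atomic_fetch_or (&flags, 0x04); /* old value: 1 */
    signed char before_add = atomic_fetch_add(&flags, 1);    /* old value: 5 */
    signed char before_and = atomic_fetch_and(&flags, 0x03); /* old value: 6 */

    printf("%d %d %d final=%d\n",
           before_or, before_add, before_and, (int)atomic_load(&flags));
    return 0;   /* prints 1 5 6 final=2 */
}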