diff options
author | David Majnemer <david.majnemer@gmail.com> | 2016-06-28 02:54:43 +0000 |
---|---|---|
committer | David Majnemer <david.majnemer@gmail.com> | 2016-06-28 02:54:43 +0000 |
commit | 2916a612cd7b8ba6fc74c07af0e5c89fdb72e9cd (patch) | |
tree | c76fad36ce9378f90e6a785d19e066c66aa046a0 /clang/lib/Headers | |
parent | 7937ef37969f7d46d8626e2b61a6ae8361afbea6 (diff) | |
download | bcm5719-llvm-2916a612cd7b8ba6fc74c07af0e5c89fdb72e9cd.tar.gz bcm5719-llvm-2916a612cd7b8ba6fc74c07af0e5c89fdb72e9cd.zip |
[intrin.h] Certain _Interlocked intrinsics return the old value
This fixes PR28326.
llvm-svn: 273986
Diffstat (limited to 'clang/lib/Headers')
-rw-r--r-- | clang/lib/Headers/intrin.h | 24 |
1 file changed, 12 insertions, 12 deletions
diff --git a/clang/lib/Headers/intrin.h b/clang/lib/Headers/intrin.h index 3033fae1cf7..f18711ad1ec 100644 --- a/clang/lib/Headers/intrin.h +++ b/clang/lib/Headers/intrin.h @@ -666,20 +666,20 @@ _InterlockedDecrement64(__int64 volatile *_Value) { \*----------------------------------------------------------------------------*/ static __inline__ char __DEFAULT_FN_ATTRS _InterlockedAnd8(char volatile *_Value, char _Mask) { - return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST); } static __inline__ short __DEFAULT_FN_ATTRS _InterlockedAnd16(short volatile *_Value, short _Mask) { - return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST); } static __inline__ long __DEFAULT_FN_ATTRS _InterlockedAnd(long volatile *_Value, long _Mask) { - return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST); } #ifdef __x86_64__ static __inline__ __int64 __DEFAULT_FN_ATTRS _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) { - return __atomic_and_fetch(_Value, _Mask, __ATOMIC_SEQ_CST); + return __atomic_fetch_and(_Value, _Mask, __ATOMIC_SEQ_CST); } #endif /*----------------------------------------------------------------------------*\ @@ -687,20 +687,20 @@ _InterlockedAnd64(__int64 volatile *_Value, __int64 _Mask) { \*----------------------------------------------------------------------------*/ static __inline__ char __DEFAULT_FN_ATTRS _InterlockedOr8(char volatile *_Value, char _Mask) { - return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST); } static __inline__ short __DEFAULT_FN_ATTRS _InterlockedOr16(short volatile *_Value, short _Mask) { - return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST); } static __inline__ long __DEFAULT_FN_ATTRS 
_InterlockedOr(long volatile *_Value, long _Mask) { - return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST); } #ifdef __x86_64__ static __inline__ __int64 __DEFAULT_FN_ATTRS _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) { - return __atomic_or_fetch(_Value, _Mask, __ATOMIC_SEQ_CST); + return __atomic_fetch_or(_Value, _Mask, __ATOMIC_SEQ_CST); } #endif /*----------------------------------------------------------------------------*\ @@ -708,20 +708,20 @@ _InterlockedOr64(__int64 volatile *_Value, __int64 _Mask) { \*----------------------------------------------------------------------------*/ static __inline__ char __DEFAULT_FN_ATTRS _InterlockedXor8(char volatile *_Value, char _Mask) { - return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST); + return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST); } static __inline__ short __DEFAULT_FN_ATTRS _InterlockedXor16(short volatile *_Value, short _Mask) { - return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST); + return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST); } static __inline__ long __DEFAULT_FN_ATTRS _InterlockedXor(long volatile *_Value, long _Mask) { - return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST); + return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST); } #ifdef __x86_64__ static __inline__ __int64 __DEFAULT_FN_ATTRS _InterlockedXor64(__int64 volatile *_Value, __int64 _Mask) { - return __atomic_xor_fetch(_Value, _Mask, __ATOMIC_SEQ_CST); + return __atomic_fetch_xor(_Value, _Mask, __ATOMIC_SEQ_CST); } #endif /*----------------------------------------------------------------------------*\ |