Diffstat (limited to 'openmp/runtime/src/kmp_lock.h')
-rw-r--r-- | openmp/runtime/src/kmp_lock.h | 17 | +++++++++--------
1 file changed, 9 insertions, 8 deletions
diff --git a/openmp/runtime/src/kmp_lock.h b/openmp/runtime/src/kmp_lock.h
index 220236de2f5..6a88d7bc52a 100644
--- a/openmp/runtime/src/kmp_lock.h
+++ b/openmp/runtime/src/kmp_lock.h
@@ -649,7 +649,7 @@ extern int (*__kmp_acquire_user_lock_with_checks_)(kmp_user_lock_p lck,
       } \
     } \
     if (lck->tas.lk.poll != 0 || \
-        !__kmp_compare_and_store_acq(&lck->tas.lk.poll, 0, gtid + 1)) { \
+        !__kmp_atomic_compare_store_acq(&lck->tas.lk.poll, 0, gtid + 1)) { \
       kmp_uint32 spins; \
       KMP_FSYNC_PREPARE(lck); \
       KMP_INIT_YIELD(spins); \
@@ -659,8 +659,8 @@ extern int (*__kmp_acquire_user_lock_with_checks_)(kmp_user_lock_p lck,
       } else { \
         KMP_YIELD_SPIN(spins); \
       } \
-      while (lck->tas.lk.poll != 0 || \
-             !__kmp_compare_and_store_acq(&lck->tas.lk.poll, 0, gtid + 1)) { \
+      while (lck->tas.lk.poll != 0 || !__kmp_atomic_compare_store_acq( \
+                 &lck->tas.lk.poll, 0, gtid + 1)) { \
         if (TCR_4(__kmp_nth) > \
             (__kmp_avail_proc ? __kmp_avail_proc : __kmp_xproc)) { \
           KMP_YIELD(TRUE); \
@@ -702,7 +702,7 @@ static inline int __kmp_test_user_lock_with_checks(kmp_user_lock_p lck,
       }
     }
     return ((lck->tas.lk.poll == 0) &&
-            __kmp_compare_and_store_acq(&lck->tas.lk.poll, 0, gtid + 1));
+            __kmp_atomic_compare_store_acq(&lck->tas.lk.poll, 0, gtid + 1));
   } else {
     KMP_DEBUG_ASSERT(__kmp_test_user_lock_with_checks_ != NULL);
     return (*__kmp_test_user_lock_with_checks_)(lck, gtid);
@@ -767,7 +767,7 @@ extern int (*__kmp_acquire_nested_user_lock_with_checks_)(kmp_user_lock_p lck,
       *depth = KMP_LOCK_ACQUIRED_NEXT; \
     } else { \
       if ((lck->tas.lk.poll != 0) || \
-          !__kmp_compare_and_store_acq(&lck->tas.lk.poll, 0, gtid + 1)) { \
+          !__kmp_atomic_compare_store_acq(&lck->tas.lk.poll, 0, gtid + 1)) { \
         kmp_uint32 spins; \
         KMP_FSYNC_PREPARE(lck); \
         KMP_INIT_YIELD(spins); \
@@ -777,8 +777,9 @@ extern int (*__kmp_acquire_nested_user_lock_with_checks_)(kmp_user_lock_p lck,
       } else { \
         KMP_YIELD_SPIN(spins); \
       } \
-        while ((lck->tas.lk.poll != 0) || \
-               !__kmp_compare_and_store_acq(&lck->tas.lk.poll, 0, gtid + 1)) { \
+        while ( \
+            (lck->tas.lk.poll != 0) || \
+            !__kmp_atomic_compare_store_acq(&lck->tas.lk.poll, 0, gtid + 1)) { \
           if (TCR_4(__kmp_nth) > \
               (__kmp_avail_proc ? __kmp_avail_proc : __kmp_xproc)) { \
             KMP_YIELD(TRUE); \
@@ -826,7 +827,7 @@ static inline int __kmp_test_nested_user_lock_with_checks(kmp_user_lock_p lck,
       return ++lck->tas.lk.depth_locked; /* same owner, depth increased */
     }
     retval = ((lck->tas.lk.poll == 0) &&
-              __kmp_compare_and_store_acq(&lck->tas.lk.poll, 0, gtid + 1));
+              __kmp_atomic_compare_store_acq(&lck->tas.lk.poll, 0, gtid + 1));
     if (retval) {
       KMP_MB();
       lck->tas.lk.depth_locked = 1;
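
The whole change is a rename of the acquire-ordered compare-and-swap used on the test-and-set (TAS) lock word: the legacy __kmp_compare_and_store_acq() intrinsic is replaced by __kmp_atomic_compare_store_acq(), the runtime's std::atomic-based helper. As a rough sketch only (atomic_compare_store_acq and try_acquire_tas below are illustrative stand-ins, not the runtime's actual definitions from kmp_os.h), an acquire-ordered compare-and-store wrapper and the fast path that calls it look roughly like this:

#include <atomic>
#include <cstdint>

// Illustrative alias; the real runtime defines kmp_int32 itself.
using kmp_int32 = std::int32_t;

// Sketch of an acquire-ordered compare-and-store: returns true iff *p held
// `expected` and was replaced by `desired`. Success uses acquire ordering so
// the critical section cannot be reordered before the lock acquisition;
// failure needs no ordering, so relaxed suffices.
template <typename T>
static inline bool atomic_compare_store_acq(std::atomic<T> *p, T expected,
                                            T desired) {
  return p->compare_exchange_strong(expected, desired,
                                    std::memory_order_acquire,
                                    std::memory_order_relaxed);
}

// Usage mirroring the TAS fast path in the diff: poll == 0 means "unlocked",
// and gtid + 1 tags the new owner (gtid is the global thread id).
static inline bool try_acquire_tas(std::atomic<kmp_int32> &poll,
                                   kmp_int32 gtid) {
  return poll.load(std::memory_order_relaxed) == 0 &&
         atomic_compare_store_acq(&poll, kmp_int32(0), kmp_int32(gtid + 1));
}

The cheap "looks unlocked" load before the CAS mirrors the lck->tas.lk.poll != 0 test in the macros above: contended threads spin on plain loads and only attempt the read-modify-write once the lock appears free.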