author    Ralf Baechle <ralf@linux-mips.org>    2007-06-07 13:17:30 +0100
committer Ralf Baechle <ralf@linux-mips.org>    2007-06-11 18:20:55 +0100
commit    ff72b7a6188088976bf7d77d3309a9b2f1716071 (patch)
tree      760ed02dd1f5fc3edb871d0f568c71f82b859155 /include/asm-mips
parent    e10e0cc8852ac846d5590188b935c98742e5cc43 (diff)
[MIPS] Fix smp barriers in test_and_{change,clear,set}_bit
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Diffstat (limited to 'include/asm-mips')
-rw-r--r--  include/asm-mips/bitops.h | 51
 1 file changed, 19 insertions(+), 32 deletions(-)
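
What the patch fixes: in test_and_set_bit(), test_and_clear_bit() and test_and_change_bit(), every branch of the if/else chain returned its result directly, so the smp_mb() at the end of each function was dead code and the operations could return to SMP callers without the intended barrier. The patch hoists the result into a function-scope variable res, drops the early returns, and lets every branch fall through to a single smp_mb() followed by one return. A minimal sketch of the resulting control flow (the ll/sc assembly bodies, which the patch does not change, are elided as comments):

static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	unsigned short bit = nr & SZLONG_MASK;
	unsigned long res;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		/* ll/sc retry loop with the R10000 branch-likely
		 * workaround; the asm writes the old bit value into
		 * res through its "=&r" (res) output operand. */
	} else if (cpu_has_llsc) {
		/* plain ll/sc retry loop; likewise sets res. */
	} else {
		/* no ll/sc: fall back to disabling interrupts. */
		volatile unsigned long *a = addr + (nr >> SZLONG_LOG);
		unsigned long mask = 1UL << bit;
		unsigned long flags;

		raw_local_irq_save(flags);
		res = mask & *a;
		*a |= mask;
		raw_local_irq_restore(flags);
	}

	smp_mb();	/* now reached on every path */

	return res != 0;
}
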
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index d995413e11fd..ffe245b4258f 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -238,10 +238,11 @@ static inline int test_and_set_bit(unsigned long nr,
volatile unsigned long *addr)
{
unsigned short bit = nr & SZLONG_MASK;
+ unsigned long res;
if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
- unsigned long temp, res;
+ unsigned long temp;
__asm__ __volatile__(
" .set mips3 \n"
@@ -254,11 +255,9 @@ static inline int test_and_set_bit(unsigned long nr,
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << bit), "m" (*m)
: "memory");
-
- return res != 0;
} else if (cpu_has_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
- unsigned long temp, res;
+ unsigned long temp;
__asm__ __volatile__(
" .set push \n"
@@ -277,25 +276,22 @@ static inline int test_and_set_bit(unsigned long nr,
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << bit), "m" (*m)
: "memory");
-
- return res != 0;
} else {
volatile unsigned long *a = addr;
unsigned long mask;
- int retval;
unsigned long flags;
a += nr >> SZLONG_LOG;
mask = 1UL << bit;
raw_local_irq_save(flags);
- retval = (mask & *a) != 0;
+ res = (mask & *a);
*a |= mask;
raw_local_irq_restore(flags);
-
- return retval;
}
smp_mb();
+
+ return res != 0;
}
/*
@@ -310,6 +306,7 @@ static inline int test_and_clear_bit(unsigned long nr,
volatile unsigned long *addr)
{
unsigned short bit = nr & SZLONG_MASK;
+ unsigned long res;
if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -327,12 +324,10 @@ static inline int test_and_clear_bit(unsigned long nr,
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << bit), "m" (*m)
: "memory");
-
- return res != 0;
#ifdef CONFIG_CPU_MIPSR2
} else if (__builtin_constant_p(nr)) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
- unsigned long temp, res;
+ unsigned long temp;
__asm__ __volatile__(
"1: " __LL "%0, %1 # test_and_clear_bit \n"
@@ -346,12 +341,10 @@ static inline int test_and_clear_bit(unsigned long nr,
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "ri" (bit), "m" (*m)
: "memory");
-
- return res;
#endif
} else if (cpu_has_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
- unsigned long temp, res;
+ unsigned long temp;
__asm__ __volatile__(
" .set push \n"
@@ -371,25 +364,22 @@ static inline int test_and_clear_bit(unsigned long nr,
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << bit), "m" (*m)
: "memory");
-
- return res != 0;
} else {
volatile unsigned long *a = addr;
unsigned long mask;
- int retval;
unsigned long flags;
a += nr >> SZLONG_LOG;
mask = 1UL << bit;
raw_local_irq_save(flags);
- retval = (mask & *a) != 0;
+ res = (mask & *a);
*a &= ~mask;
raw_local_irq_restore(flags);
-
- return retval;
}
smp_mb();
+
+ return res != 0;
}
/*
@@ -404,10 +394,11 @@ static inline int test_and_change_bit(unsigned long nr,
volatile unsigned long *addr)
{
unsigned short bit = nr & SZLONG_MASK;
+ unsigned long res;
if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
- unsigned long temp, res;
+ unsigned long temp;
__asm__ __volatile__(
" .set mips3 \n"
@@ -420,11 +411,9 @@ static inline int test_and_change_bit(unsigned long nr,
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << bit), "m" (*m)
: "memory");
-
- return res != 0;
} else if (cpu_has_llsc) {
unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
- unsigned long temp, res;
+ unsigned long temp;
__asm__ __volatile__(
" .set push \n"
@@ -443,24 +432,22 @@ static inline int test_and_change_bit(unsigned long nr,
: "=&r" (temp), "=m" (*m), "=&r" (res)
: "r" (1UL << bit), "m" (*m)
: "memory");
-
- return res != 0;
} else {
volatile unsigned long *a = addr;
- unsigned long mask, retval;
+ unsigned long mask;
unsigned long flags;
a += nr >> SZLONG_LOG;
mask = 1UL << bit;
raw_local_irq_save(flags);
- retval = (mask & *a) != 0;
+ res = (mask & *a);
*a ^= mask;
raw_local_irq_restore(flags);
-
- return retval;
}
smp_mb();
+
+ return res != 0;
}
#include <asm-generic/bitops/non-atomic.h>
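
Why a reachable barrier matters to callers: a value-returning atomic bitop such as test_and_set_bit() is relied on to order surrounding memory accesses on SMP, which is what allows it to serve as a simple lock primitive. A hypothetical illustration (not part of this commit; assumes only <linux/bitops.h>):

/* Hypothetical caller: a tiny bit-based trylock. */
static unsigned long lock_word;
static int shared_data;

static int try_update(void)
{
	if (test_and_set_bit(0, &lock_word))
		return 0;	/* bit already set: another CPU owns it */

	/*
	 * The smp_mb() inside test_and_set_bit() keeps this store
	 * from being reordered before the bit is actually owned.
	 */
	shared_data++;

	smp_mb__before_clear_bit();	/* order the update before release */
	clear_bit(0, &lock_word);
	return 1;
}

With the early returns of the old code, the smp_mb() in test_and_set_bit() was skipped, so another CPU could observe the update to shared_data and the state of the lock bit in an inconsistent order.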