author | David Daney <ddaney@caviumnetworks.com> | 2009-07-13 11:15:19 -0700
---|---|---
committer | Ralf Baechle <ralf@linux-mips.org> | 2009-09-17 20:07:50 +0200
commit | b791d1193af9772040e592d5aa161790f800b762 | (patch)
tree | 6adad3d9cdf278a3a1a3418ae75a2864d0cc7f39 | /arch/mips/include/asm/local.h
parent | f7ade3c168e4f437c11f57be012992bbb0e3075c | (diff)
MIPS: Allow kernel use of LL/SC to be separate from the presence of LL/SC.
On some CPUs, it is more efficient to disable and enable interrupts in the
kernel rather than use ll/sc for atomic operations. But if we were to set
cpu_has_llsc to false, we would break the userspace futex interface (in
asm/futex.h).
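For context, the userspace futex operations are gated on the hardware capability bit, so they are unaffected by the new predicate. A rough, abbreviated sketch of the shape of that check in asm/futex.h — the ll/sc branch bodies are elided, and this is an illustration rather than the verbatim macro:

	/* Userspace atomics must use ll/sc whenever the hardware has it,
	 * so this test stays on cpu_has_llsc, not kernel_uses_llsc. */
	#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
	{								\
		if (cpu_has_llsc && R10000_LLSC_WAR) {			\
			/* ll/sc loop with the R10000 workaround */	\
		} else if (cpu_has_llsc) {				\
			/* plain ll/sc loop */				\
		} else							\
			ret = -ENOSYS;					\
	}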
We separate the two concepts with a new predicate, kernel_uses_llsc, which lets us disable the kernel's use of ll/sc while still allowing the futex code to use it.
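The new predicate only needs to default to the existing one, so behavior is unchanged unless a platform overrides it. A minimal sketch of the idea, assuming it sits with the other feature tests in asm/cpu-features.h (that hunk is outside this diffstat, so the exact placement is an assumption):

	/* Kernel-side predicate: defaults to the hardware capability,
	 * but a platform can define it to 0 to force the
	 * interrupt-disable fallback in kernel atomics. */
	#ifndef kernel_uses_llsc
	#define kernel_uses_llsc	cpu_has_llsc
	#endif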
Also, there were a couple of cases in bitops.h where ll/sc was used unconditionally even when cpu_has_llsc was false.
Signed-off-by: David Daney <ddaney@caviumnetworks.com>
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Diffstat (limited to 'arch/mips/include/asm/local.h')
-rw-r--r-- | arch/mips/include/asm/local.h | 8 |
1 file changed, 4 insertions, 4 deletions
diff --git a/arch/mips/include/asm/local.h b/arch/mips/include/asm/local.h
index f96fd59e0845..361f4f16c30c 100644
--- a/arch/mips/include/asm/local.h
+++ b/arch/mips/include/asm/local.h
@@ -29,7 +29,7 @@ static __inline__ long local_add_return(long i, local_t * l)
 {
 	unsigned long result;
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
 
 		__asm__ __volatile__(
@@ -43,7 +43,7 @@ static __inline__ long local_add_return(long i, local_t * l)
 		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
 		: "Ir" (i), "m" (l->a.counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		unsigned long temp;
 
 		__asm__ __volatile__(
@@ -74,7 +74,7 @@ static __inline__ long local_sub_return(long i, local_t * l)
 {
 	unsigned long result;
 
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
 
 		__asm__ __volatile__(
@@ -88,7 +88,7 @@ static __inline__ long local_sub_return(long i, local_t * l)
 		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
 		: "Ir" (i), "m" (l->a.counter)
 		: "memory");
-	} else if (cpu_has_llsc) {
+	} else if (kernel_uses_llsc) {
 		unsigned long temp;
 
 		__asm__ __volatile__(
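When kernel_uses_llsc evaluates false, both functions fall through to a branch outside the diff context above: a plain read-modify-write with interrupts masked. A sketch of that fallback for local_add_return(), reconstructed to match the surrounding local.h style rather than quoted from the patch:

	} else {
		unsigned long flags;

		/* No kernel ll/sc: local_t need only be atomic against
		 * interrupts on this CPU, so masking them is enough. */
		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}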