Diffstat (limited to 'include/asm-mips/bitops.h')
 -rw-r--r--  include/asm-mips/bitops.h | 52
 1 file changed, 51 insertions(+), 1 deletion(-)
diff --git a/include/asm-mips/bitops.h b/include/asm-mips/bitops.h
index 06c08228a525..89436b96ad66 100644
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -3,7 +3,7 @@
  * License. See the file "COPYING" in the main directory of this archive
  * for more details.
  *
- * Copyright (c) 1994 - 1997, 1999, 2000, 06 Ralf Baechle (ralf@linux-mips.org)
+ * Copyright (c) 1994 - 1997, 99, 2000, 06, 07 Ralf Baechle (ralf@linux-mips.org)
  * Copyright (c) 1999, 2000 Silicon Graphics, Inc.
  */
 #ifndef _ASM_BITOPS_H
@@ -24,11 +24,15 @@
 #define SZLONG_MASK 31UL
 #define __LL		"ll	"
 #define __SC		"sc	"
+#define __INS		"ins	"
+#define __EXT		"ext	"
 #elif (_MIPS_SZLONG == 64)
 #define SZLONG_LOG 6
 #define SZLONG_MASK 63UL
 #define __LL		"lld	"
 #define __SC		"scd	"
+#define __INS		"dins	"
+#define __EXT		"dext	"
 #endif
 
 /*
@@ -62,6 +66,19 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (*m)
 		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
+#ifdef CONFIG_CPU_MIPSR2
+	} else if (__builtin_constant_p(nr)) {
+		__asm__ __volatile__(
+		"1:	" __LL "%0, %1			# set_bit	\n"
+		"	" __INS "%0, %4, %2, 1				\n"
+		"	" __SC "%0, %1					\n"
+		"	beqz	%0, 2f					\n"
+		"	.subsection 2					\n"
+		"2:	b	1b					\n"
+		"	.previous					\n"
+		: "=&r" (temp), "=m" (*m)
+		: "ir" (nr & SZLONG_MASK), "m" (*m), "r" (~0));
+#endif /* CONFIG_CPU_MIPSR2 */
 	} else if (cpu_has_llsc) {
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -113,6 +130,19 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 		"	.set	mips0					\n"
 		: "=&r" (temp), "=m" (*m)
 		: "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
+#ifdef CONFIG_CPU_MIPSR2
+	} else if (__builtin_constant_p(nr)) {
+		__asm__ __volatile__(
+		"1:	" __LL "%0, %1			# clear_bit	\n"
+		"	" __INS "%0, $0, %2, 1				\n"
+		"	" __SC "%0, %1					\n"
+		"	beqz	%0, 2f					\n"
+		"	.subsection 2					\n"
+		"2:	b	1b					\n"
+		"	.previous					\n"
+		: "=&r" (temp), "=m" (*m)
+		: "ir" (nr & SZLONG_MASK), "m" (*m));
+#endif /* CONFIG_CPU_MIPSR2 */
 	} else if (cpu_has_llsc) {
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -291,6 +321,26 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "memory");
 
 		return res != 0;
+#ifdef CONFIG_CPU_MIPSR2
+	} else if (__builtin_constant_p(nr)) {
+		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
+		unsigned long temp, res;
+
+		__asm__ __volatile__(
+		"1:	" __LL	"%0, %1		# test_and_clear_bit	\n"
+		"	" __EXT "%2, %0, %3, 1				\n"
+		"	" __INS	"%0, $0, %3, 1				\n"
+		"	" __SC	"%0, %1					\n"
+		"	beqz	%0, 2f					\n"
+		"	.subsection 2					\n"
+		"2:	b	1b					\n"
+		"	.previous					\n"
+		: "=&r" (temp), "=m" (*m), "=&r" (res)
+		: "ri" (nr & SZLONG_MASK), "m" (*m)
+		: "memory");
+
+		return res;
+#endif
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 		unsigned long temp, res;
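
The new fast paths lean on the MIPS R2 ins (insert bit field) and ext (extract bit field) instructions. As a rough, illustrative user-space C model only (ins_emu/ext_emu are made-up names, not part of this patch or the kernel), their bit-field semantics look roughly like the sketch below; this is why an ins with a source of ~0 sets a single bit, an ins from register $0 clears it, and an ext recovers the old bit for test_and_clear_bit.

	#include <stdio.h>

	/*
	 * Illustrative models of the MIPS32R2 ins/ext bit-field
	 * instructions (hypothetical helpers, not from the patch above).
	 * The field is `size` bits wide and starts at bit `pos`, with
	 * 1 <= size and pos + size <= 32.
	 */
	static unsigned int ins_emu(unsigned int rt, unsigned int rs,
				    unsigned int pos, unsigned int size)
	{
		unsigned int field = (size < 32 ? (1u << size) : 0u) - 1u;
		unsigned int mask = field << pos;

		/* Replace bits [pos, pos+size-1] of rt with the low bits of rs. */
		return (rt & ~mask) | ((rs << pos) & mask);
	}

	static unsigned int ext_emu(unsigned int rs, unsigned int pos,
				    unsigned int size)
	{
		/* Extract bits [pos, pos+size-1] of rs into the low bits. */
		return (rs >> pos) & ((size < 32 ? (1u << size) : 0u) - 1u);
	}

	int main(void)
	{
		unsigned int word = 0x00f0;

		/* set_bit path:   ins word, ~0, 3, 1  -> bit 3 becomes 1 */
		printf("%#x\n", ins_emu(word, ~0u, 3, 1));	/* prints 0xf8 */
		/* clear_bit path: ins word, $0, 4, 1  -> bit 4 becomes 0 */
		printf("%#x\n", ins_emu(word, 0u, 4, 1));	/* prints 0xe0 */
		/* test_and_clear_bit path: ext reads the old bit first */
		printf("%u\n", ext_emu(word, 4, 1));		/* prints 1 */
		return 0;
	}

Because the bit number is a compile-time constant in these branches, the position operand can be encoded directly in the instruction, so the usual mask construction and or/and/xor step inside the ll/sc loop is no longer needed.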