author | Thomas Gleixner <tglx@linutronix.de> | 2009-12-02 19:49:50 +0100 |
---|---|---|
committer | Thomas Gleixner <tglx@linutronix.de> | 2009-12-14 23:55:32 +0100 |
commit | 445c89514be242b1b0080056d50bdc1b72adeb5c (patch) | |
tree | 96ed062794ad0fb6a649713c83f009eea382e8b2 /arch/sparc/include/asm/spinlock_64.h | |
parent | 6b6b4792f89346e47437682c7ba3438e6681c0f9 (diff) | |
locking: Convert raw_spinlock to arch_spinlock
The raw_spin* namespace was taken by lockdep for the architecture-specific
implementations, yet raw_spin_* would be the ideal namespace for the
spinlocks which are not converted to sleeping locks in preempt-rt.
Linus suggested converting the raw_ locks to arch_ locks and cleaning up
the namespace instead of using an artificial name like core_spin,
atomic_spin or whatever.
No functional change.
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Acked-by: Peter Zijlstra <peterz@infradead.org>
Acked-by: David S. Miller <davem@davemloft.net>
Acked-by: Ingo Molnar <mingo@elte.hu>
Cc: linux-arch@vger.kernel.org
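The rename is purely a namespace change at the type level. As a point of reference, here is a minimal sketch of the sparc lock type before and after the series; it is reconstructed from memory of arch/sparc/include/asm/spinlock_types.h of that era, so treat the exact header contents as an assumption rather than a quote.

```c
/* Sketch only -- assumed shape of arch/sparc/include/asm/spinlock_types.h
 * around this series; the layout is unchanged, only the name moves.
 */

/* Before the series: the arch-level lock still carried the raw_ prefix. */
typedef struct {
	volatile unsigned char lock;
} raw_spinlock_t;

/* After the series: the same single lock byte, now under the arch_ prefix,
 * which frees raw_spin* for the never-sleeping locks on preempt-rt.
 */
typedef struct {
	volatile unsigned char lock;
} arch_spinlock_t;
```

Because only the spelling of the type and its accessors changes, the generated code is identical, which is what "No functional change" above asserts.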
Diffstat (limited to 'arch/sparc/include/asm/spinlock_64.h')
-rw-r--r-- | arch/sparc/include/asm/spinlock_64.h | 8 |
1 file changed, 4 insertions, 4 deletions
diff --git a/arch/sparc/include/asm/spinlock_64.h b/arch/sparc/include/asm/spinlock_64.h
index 43e514783582..38e16c40efc4 100644
--- a/arch/sparc/include/asm/spinlock_64.h
+++ b/arch/sparc/include/asm/spinlock_64.h
@@ -27,7 +27,7 @@
 	do {	rmb();			\
 	} while((lp)->lock)
 
-static inline void __raw_spin_lock(raw_spinlock_t *lock)
+static inline void __raw_spin_lock(arch_spinlock_t *lock)
 {
 	unsigned long tmp;
 
@@ -46,7 +46,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 	: "memory");
 }
 
-static inline int __raw_spin_trylock(raw_spinlock_t *lock)
+static inline int __raw_spin_trylock(arch_spinlock_t *lock)
 {
 	unsigned long result;
 
@@ -59,7 +59,7 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 	return (result == 0UL);
 }
 
-static inline void __raw_spin_unlock(raw_spinlock_t *lock)
+static inline void __raw_spin_unlock(arch_spinlock_t *lock)
 {
 	__asm__ __volatile__(
 "	stb		%%g0, [%0]"
@@ -68,7 +68,7 @@ static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 	: "memory");
 }
 
-static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
+static inline void __raw_spin_lock_flags(arch_spinlock_t *lock, unsigned long flags)
 {
 	unsigned long tmp1, tmp2;
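The hunks only touch the parameter type in the function signatures; the locking protocol itself is unchanged. On sparc64 the lock is a single byte: the acquire paths (largely outside these hunks) take it with sparc's ldstub test-and-set instruction and spin on plain loads while it is held, and the release is the plain byte store visible in the unlock hunk ("stb %%g0, [%0]"). The user-space sketch below mimics that protocol with GCC's __atomic builtins; the demo_* names and the pthread driver are illustrative assumptions, not kernel code.

```c
/* Illustration only: a byte test-and-set spinlock in the spirit of the
 * sparc64 arch_spinlock above, written for user space with GCC builtins.
 * Build with: gcc -O2 -pthread demo_spinlock.c
 */
#include <stdio.h>
#include <pthread.h>

typedef struct {
	volatile unsigned char lock;	/* same shape as the sparc64 lock word */
} demo_arch_spinlock_t;

static void demo_spin_lock(demo_arch_spinlock_t *lp)
{
	/* __atomic_test_and_set() plays the role of sparc64's ldstub. */
	while (__atomic_test_and_set(&lp->lock, __ATOMIC_ACQUIRE))
		while (lp->lock)
			;	/* spin on plain reads while the lock is held */
}

static int demo_spin_trylock(demo_arch_spinlock_t *lp)
{
	/* Returns nonzero on success, like the trylock in the diff. */
	return !__atomic_test_and_set(&lp->lock, __ATOMIC_ACQUIRE);
}

static void demo_spin_unlock(demo_arch_spinlock_t *lp)
{
	/* Corresponds to the "stb %%g0, [%0]" store in the unlock hunk. */
	__atomic_clear(&lp->lock, __ATOMIC_RELEASE);
}

static demo_arch_spinlock_t lock;	/* zero-initialised == unlocked */
static long counter;

static void *worker(void *arg)
{
	(void)arg;
	for (int i = 0; i < 100000; i++) {
		demo_spin_lock(&lock);
		counter++;
		demo_spin_unlock(&lock);
	}
	return NULL;
}

int main(void)
{
	pthread_t t[4];

	for (int i = 0; i < 4; i++)
		pthread_create(&t[i], NULL, worker, NULL);
	for (int i = 0; i < 4; i++)
		pthread_join(t[i], NULL);

	printf("counter = %ld (expected 400000)\n", counter);
	return 0;
}
```

Spinning on plain reads between test-and-set attempts keeps the atomic operation off the interconnect while the lock is held, which is the same idea behind the rmb()-based wait loop visible at the top of the first hunk.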