author | Jeremy Fitzhardinge <jeremy@goop.org> | 2011-09-28 11:49:28 -0700
committer | Jeremy Fitzhardinge <jeremy@goop.org> | 2011-11-25 10:42:59 -0800
commit | 3d94ae0c70a71a9824479366775e2c7679a57d94 (patch)
tree | 041c1013f123fb3177bed6b70375fdc4f6c0ddff /arch/x86/include/asm/cmpxchg.h
parent | 4a7f340c6a75ec5fca23d9c80a59f3f28cc4a61e (diff)
x86/cmpxchg: add a locked add() helper
Mostly to remove some conditional code in spinlock.h.
Signed-off-by: Jeremy Fitzhardinge <jeremy@goop.org>
Diffstat (limited to 'arch/x86/include/asm/cmpxchg.h')
-rw-r--r-- | arch/x86/include/asm/cmpxchg.h | 42
1 file changed, 42 insertions, 0 deletions
```diff
diff --git a/arch/x86/include/asm/cmpxchg.h b/arch/x86/include/asm/cmpxchg.h
index 5d3acdf5a7a6..49eade13161c 100644
--- a/arch/x86/include/asm/cmpxchg.h
+++ b/arch/x86/include/asm/cmpxchg.h
@@ -14,6 +14,8 @@ extern void __cmpxchg_wrong_size(void)
 	__compiletime_error("Bad argument size for cmpxchg");
 extern void __xadd_wrong_size(void)
 	__compiletime_error("Bad argument size for xadd");
+extern void __add_wrong_size(void)
+	__compiletime_error("Bad argument size for add");
 
 /*
  * Constants for operation sizes. On 32-bit, the 64-bit size it set to
@@ -207,4 +209,44 @@ extern void __xadd_wrong_size(void)
 #define xadd_sync(ptr, inc)	__xadd((ptr), (inc), "lock; ")
 #define xadd_local(ptr, inc)	__xadd((ptr), (inc), "")
 
+#define __add(ptr, inc, lock)						\
+	({								\
+		__typeof__ (*(ptr)) __ret = (inc);			\
+		switch (sizeof(*(ptr))) {				\
+		case __X86_CASE_B:					\
+			asm volatile (lock "addb %b1, %0\n"		\
+				      : "+m" (*(ptr)) : "ri" (inc)	\
+				      : "memory", "cc");		\
+			break;						\
+		case __X86_CASE_W:					\
+			asm volatile (lock "addw %w1, %0\n"		\
+				      : "+m" (*(ptr)) : "ri" (inc)	\
+				      : "memory", "cc");		\
+			break;						\
+		case __X86_CASE_L:					\
+			asm volatile (lock "addl %1, %0\n"		\
+				      : "+m" (*(ptr)) : "ri" (inc)	\
+				      : "memory", "cc");		\
+			break;						\
+		case __X86_CASE_Q:					\
+			asm volatile (lock "addq %1, %0\n"		\
+				      : "+m" (*(ptr)) : "ri" (inc)	\
+				      : "memory", "cc");		\
+			break;						\
+		default:						\
+			__add_wrong_size();				\
+		}							\
+		__ret;							\
+	})
+
+/*
+ * add_*() adds "inc" to "*ptr"
+ *
+ * __add() takes a lock prefix
+ * add_smp() is locked when multiple CPUs are online
+ * add_sync() is always locked
+ */
+#define add_smp(ptr, inc)	__add((ptr), (inc), LOCK_PREFIX)
+#define add_sync(ptr, inc)	__add((ptr), (inc), "lock; ")
+
 #endif	/* ASM_X86_CMPXCHG_H */
```
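To make the technique concrete, here is a minimal, self-contained userspace sketch of what the locked add boils down to for the 4-byte (`__X86_CASE_L`) case. This is an illustration only, not kernel code: the function name `locked_add32` and the example program are assumptions introduced here, and the real macro additionally dispatches on `sizeof(*(ptr))` and uses `LOCK_PREFIX`, which is patched out on uniprocessor kernels, whereas `add_sync()` always emits the `lock` prefix.

```c
/*
 * Userspace sketch (assumed example, not part of the patch) of the
 * lock-prefixed add that __add() emits for 4-byte operands.
 * Build on x86/x86-64 with GCC or Clang.
 */
#include <stdio.h>

static inline void locked_add32(volatile int *ptr, int inc)
{
	/* "lock addl" makes the read-modify-write atomic across CPUs. */
	asm volatile("lock; addl %1, %0"
		     : "+m" (*ptr)		/* memory operand, read and written */
		     : "ri" (inc)		/* register or immediate increment */
		     : "memory", "cc");		/* clobbers: acts as a barrier, sets flags */
}

int main(void)
{
	int counter = 0;

	locked_add32(&counter, 5);
	locked_add32(&counter, -2);
	printf("counter = %d\n", counter);	/* prints 3 */
	return 0;
}
```

The design mirrors the existing `xadd_*()` helpers in this header: callers pick `add_smp()` when the lock prefix is only needed on SMP builds, and `add_sync()` when the operation must be locked unconditionally.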