Diffstat (limited to 'include')
-rw-r--r--   include/asm-generic/local.h   13
-rw-r--r--   include/asm-i386/local.h       6
-rw-r--r--   include/asm-x86_64/local.h    10
3 files changed, 18 insertions, 11 deletions
diff --git a/include/asm-generic/local.h b/include/asm-generic/local.h
index de4614840c2c..9291c24f5819 100644
--- a/include/asm-generic/local.h
+++ b/include/asm-generic/local.h
@@ -7,8 +7,15 @@
#include <asm/atomic.h>
#include <asm/types.h>
-/* An unsigned long type for operations which are atomic for a single
- * CPU. Usually used in combination with per-cpu variables. */
+/*
+ * A signed long type for operations which are atomic for a single CPU.
+ * Usually used in combination with per-cpu variables.
+ *
+ * This is the default implementation, which uses atomic_long_t. Which is
+ * rather pointless. The whole point behind local_t is that some processors
+ * can perform atomic adds and subtracts in a manner which is atomic wrt IRQs
+ * running on this CPU. local_t allows exploitation of such capabilities.
+ */
/* Implement in terms of atomics. */
@@ -20,7 +27,7 @@ typedef struct
#define LOCAL_INIT(i) { ATOMIC_LONG_INIT(i) }
-#define local_read(l) ((unsigned long)atomic_long_read(&(l)->a))
+#define local_read(l) atomic_long_read(&(l)->a)
#define local_set(l,i) atomic_long_set((&(l)->a),(i))
#define local_inc(l) atomic_long_inc(&(l)->a)
#define local_dec(l) atomic_long_dec(&(l)->a)
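For context, here is a minimal sketch (not part of this patch) of the usual pattern the new comment describes: a local_t embedded in a per-CPU variable, incremented cheaply on the owning CPU and summed lazily by a reader. The counter name my_events is hypothetical; DEFINE_PER_CPU, get_cpu_var/put_cpu_var, per_cpu and for_each_possible_cpu are the standard per-cpu helpers of this kernel era.

/* Illustrative only: a per-CPU event counter built on local_t. */
#include <linux/percpu.h>
#include <linux/cpumask.h>
#include <asm/local.h>

static DEFINE_PER_CPU(local_t, my_events) = LOCAL_INIT(0);

static void count_event(void)
{
	/* Pin to this CPU, then do an increment that is atomic wrt IRQs
	 * on this CPU but needs no LOCK prefix, since no other CPU
	 * touches this copy. */
	local_inc(&get_cpu_var(my_events));
	put_cpu_var(my_events);
}

static long read_event_total(void)
{
	long sum = 0;
	int cpu;

	/* A reader sums all CPUs' counters; the result is only
	 * approximately current, the usual per-CPU trade-off. */
	for_each_possible_cpu(cpu)
		sum += local_read(&per_cpu(my_events, cpu));
	return sum;
}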
diff --git a/include/asm-i386/local.h b/include/asm-i386/local.h
index 0177da80dde3..e67fa08260fe 100644
--- a/include/asm-i386/local.h
+++ b/include/asm-i386/local.h
@@ -5,7 +5,7 @@
typedef struct
{
- volatile unsigned long counter;
+ volatile long counter;
} local_t;
#define LOCAL_INIT(i) { (i) }
@@ -29,7 +29,7 @@ static __inline__ void local_dec(local_t *v)
:"m" (v->counter));
}
-static __inline__ void local_add(unsigned long i, local_t *v)
+static __inline__ void local_add(long i, local_t *v)
{
__asm__ __volatile__(
"addl %1,%0"
@@ -37,7 +37,7 @@ static __inline__ void local_add(unsigned long i, local_t *v)
:"ir" (i), "m" (v->counter));
}
-static __inline__ void local_sub(unsigned long i, local_t *v)
+static __inline__ void local_sub(long i, local_t *v)
{
__asm__ __volatile__(
"subl %1,%0"
diff --git a/include/asm-x86_64/local.h b/include/asm-x86_64/local.h
index bf148037d4e5..cd17945bf218 100644
--- a/include/asm-x86_64/local.h
+++ b/include/asm-x86_64/local.h
@@ -5,7 +5,7 @@
typedef struct
{
- volatile unsigned long counter;
+ volatile long counter;
} local_t;
#define LOCAL_INIT(i) { (i) }
@@ -13,7 +13,7 @@ typedef struct
#define local_read(v) ((v)->counter)
#define local_set(v,i) (((v)->counter) = (i))
-static __inline__ void local_inc(local_t *v)
+static inline void local_inc(local_t *v)
{
__asm__ __volatile__(
"incq %0"
@@ -21,7 +21,7 @@ static __inline__ void local_inc(local_t *v)
:"m" (v->counter));
}
-static __inline__ void local_dec(local_t *v)
+static inline void local_dec(local_t *v)
{
__asm__ __volatile__(
"decq %0"
@@ -29,7 +29,7 @@ static __inline__ void local_dec(local_t *v)
:"m" (v->counter));
}
-static __inline__ void local_add(unsigned int i, local_t *v)
+static inline void local_add(long i, local_t *v)
{
__asm__ __volatile__(
"addq %1,%0"
@@ -37,7 +37,7 @@ static __inline__ void local_add(unsigned int i, local_t *v)
:"ir" (i), "m" (v->counter));
}
-static __inline__ void local_sub(unsigned int i, local_t *v)
+static inline void local_sub(long i, local_t *v)
{
__asm__ __volatile__(
"subq %1,%0"