/*
* Copyright 2004-2011 Analog Devices Inc.
*
* Licensed under the GPL-2 or later.
*/

#ifndef __ARCH_BLACKFIN_ATOMIC__
#define __ARCH_BLACKFIN_ATOMIC__

#include <asm/cmpxchg.h>

#ifdef CONFIG_SMP

#include <asm/barrier.h>
#include <linux/linkage.h>
#include <linux/types.h>

/* Atomic primitives implemented out of line in assembly. */
asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);

asmlinkage int __raw_atomic_add_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_and_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_or_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);

/* On SMP, reads must bypass the non-coherent core caches. */
#define atomic_read(v)		__raw_uncached_fetch_asm(&(v)->counter)

#define atomic_add_return(i, v)	__raw_atomic_add_asm(&(v)->counter, i)
#define atomic_sub_return(i, v)	__raw_atomic_add_asm(&(v)->counter, -(i))

#define CONFIG_ARCH_HAS_ATOMIC_OR

#define atomic_or(i, v)		(void)__raw_atomic_or_asm(&(v)->counter, i)
#define atomic_and(i, v)	(void)__raw_atomic_and_asm(&(v)->counter, i)
#define atomic_xor(i, v)	(void)__raw_atomic_xor_asm(&(v)->counter, i)
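
/*
 * Illustrative sketch (not part of this header): atomically clearing
 * bit 0 and setting bit 1 of a hypothetical atomic_t named "flags":
 *
 *	static atomic_t flags = ATOMIC_INIT(0);
 *
 *	atomic_and(~0x1, &flags);
 *	atomic_or(0x2, &flags);
 */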

/* Deprecated; use atomic_and()/atomic_or() directly instead. */
static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	atomic_and(~mask, v);
}

static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	atomic_or(mask, v);
}

#endif /* CONFIG_SMP */

#include <asm-generic/atomic.h>

#endif /* __ARCH_BLACKFIN_ATOMIC__ */
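
/*
 * Usage sketch (illustrative only, not part of this header): a
 * refcount where the first caller powers a device up and the last one
 * powers it down.  "foo_users", "foo_get", "foo_put", "foo_power_on"
 * and "foo_power_off" are invented names for the example; it relies
 * only on atomic_add_return()/atomic_sub_return() returning the new
 * value.
 *
 *	static atomic_t foo_users = ATOMIC_INIT(0);
 *
 *	static void foo_get(void)
 *	{
 *		if (atomic_add_return(1, &foo_users) == 1)
 *			foo_power_on();
 *	}
 *
 *	static void foo_put(void)
 *	{
 *		if (atomic_sub_return(1, &foo_users) == 0)
 *			foo_power_off();
 *	}
 */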