#ifndef __ARCH_I386_ATOMIC__
#define __ARCH_I386_ATOMIC__

#include <linux/compiler.h>
#include <asm/processor.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		(((v)->counter) = (i))

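/*
 * Illustrative usage (not part of the original header; the variable
 * name is hypothetical): a simple event counter.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_events, 0);
 *	printk(KERN_INFO "events: %d\n", atomic_read(&nr_events));
 *
 * Both of these are plain loads/stores of the 32-bit counter; only
 * the read-modify-write operations below need the lock prefix.
 */
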
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "addl %1,%0"
		:"+m" (v->counter)
		:"ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "subl %1,%0"
		:"+m" (v->counter)
		:"ir" (i));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "subl %2,%0; sete %1"
		:"+m" (v->counter), "=qm" (c)
		:"ir" (i) : "memory");
	return c;
}

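/*
 * Illustrative sketch (not from the original header; obj and refcnt
 * are hypothetical names): dropping a batch of references and
 * freeing the object when the last one goes away.
 *
 *	if (atomic_sub_and_test(nr_refs, &obj->refcnt))
 *		kfree(obj);
 */
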
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0"
		:"+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decl %0"
		:"+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decl %0; sete %1"
		:"+m" (v->counter), "=qm" (c)
		: : "memory");
	return c != 0;
}

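/*
 * Illustrative sketch (not part of the original header; struct foo
 * and its refcnt field are hypothetical): the classic refcount
 * release idiom built on atomic_dec_and_test().
 *
 *	void put_foo(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcnt))
 *			kfree(f);
 *	}
 */
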
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "incl %0; sete %1"
		:"+m" (v->counter), "=qm" (c)
		: : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "addl %2,%0; sets %1"
		:"+m" (v->counter), "=qm" (c)
		:"ir" (i) : "memory");
	return c;
}

/**
 * atomic_add_return - add integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	__asm__ __volatile__(
		LOCK_PREFIX "xaddl %0, %1"
		:"+r" (i), "+m" (v->counter)
		: : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}

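/*
 * Illustrative usage (not in the original header; the variable name
 * is hypothetical): handing out unique, monotonically increasing
 * sequence numbers.
 *
 *	static atomic_t seq = ATOMIC_INIT(0);
 *
 *	int next_seq(void)
 *	{
 *		return atomic_add_return(1, &seq);
 *	}
 *
 * Unlike atomic_inc() followed by atomic_read(), the returned value
 * is the one this caller produced, even under concurrent updates.
 */
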
/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i,v);
}

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

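/*
 * Illustrative sketch (not from the original header; in_use is a
 * hypothetical flag): atomic_xchg() as a crude test-and-set.
 *
 *	static atomic_t in_use = ATOMIC_INIT(0);
 *
 *	if (atomic_xchg(&in_use, 1) != 0)
 *		return -EBUSY;		(someone else already claimed it)
 *	...
 *	atomic_set(&in_use, 0);		(release)
 *
 * atomic_add_unless() below shows the standard atomic_cmpxchg()
 * retry-loop pattern.
 */
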
/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

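/*
 * Illustrative sketch (not part of the original header; the lookup
 * function and object names are hypothetical): taking a reference
 * only while the object is still live, e.g. in a lockless lookup.
 *
 *	obj = table_lookup(key);
 *	if (obj && !atomic_inc_not_zero(&obj->refcnt))
 *		obj = NULL;	(refcount already hit zero; being freed)
 */
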
#define atomic_inc_return(v)  (atomic_add_return(1,v))
#define atomic_dec_return(v)  (atomic_sub_return(1,v))

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
	__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
	: : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr) \
	__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
	: : "r" (mask), "m" (*(addr)) : "memory")

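/*
 * Illustrative usage (not from the original header; the flag word and
 * bit are hypothetical):
 *
 *	static unsigned long status;
 *	#define STATUS_RUNNING 0x01
 *
 *	atomic_set_mask(STATUS_RUNNING, &status);
 *	atomic_clear_mask(STATUS_RUNNING, &status);
 *
 * For single bits, set_bit()/clear_bit() from <asm/bitops.h> are the
 * more common interface.
 */
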
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

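/*
 * Illustrative sketch (not in the original header; the structure is
 * hypothetical): portable code must still place these barriers around
 * atomic_dec()/atomic_inc() when it relies on ordering, even though
 * on x86 they compile down to a compiler barrier only.
 *
 *	obj->state = STATE_DEAD;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->refcnt);
 */
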
#include <asm-generic/atomic.h>
#endif