Commit | Line | Data |
---|---|---|
1da177e4 | 1 | /* |
edf7b938 | 2 | * Atomic operations that C can't guarantee us. Useful for |
1da177e4 LT |
3 | * resource counting etc.. |
4 | * | |
5 | * But use these as seldom as possible since they are much more slower | |
6 | * than regular operations. | |
7 | * | |
8 | * This file is subject to the terms and conditions of the GNU General Public | |
9 | * License. See the file "COPYING" in the main directory of this archive | |
10 | * for more details. | |
11 | * | |
e303e088 | 12 | * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle |
1da177e4 | 13 | */ |
1da177e4 LT |
14 | #ifndef _ASM_ATOMIC_H |
15 | #define _ASM_ATOMIC_H | |
16 | ||
192ef366 | 17 | #include <linux/irqflags.h> |
ea435467 | 18 | #include <linux/types.h> |
0004a9df | 19 | #include <asm/barrier.h> |
b0984c43 | 20 | #include <asm/compiler.h> |
1da177e4 | 21 | #include <asm/cpu-features.h> |
b81947c6 | 22 | #include <asm/cmpxchg.h> |
1da177e4 LT |
23 | #include <asm/war.h> |
24 | ||
70342287 | 25 | #define ATOMIC_INIT(i) { (i) } |
1da177e4 LT |
26 | |
27 | /* | |
28 | * atomic_read - read atomic variable | |
29 | * @v: pointer of type atomic_t | |
30 | * | |
31 | * Atomically reads the value of @v. | |
32 | */ | |
62e8a325 | 33 | #define atomic_read(v) READ_ONCE((v)->counter) |
1da177e4 LT |
34 | |
35 | /* | |
36 | * atomic_set - set atomic variable | |
37 | * @v: pointer of type atomic_t | |
38 | * @i: required value | |
39 | * | |
40 | * Atomically sets the value of @v to @i. | |
41 | */ | |
62e8a325 | 42 | #define atomic_set(v, i) WRITE_ONCE((v)->counter, (i)) |
1da177e4 | 43 | |
ddb3108e MR |
/*
 * ATOMIC_OP(op, c_op, asm_op) - generate atomic_##op(), a void
 * read-modify-write operation on a 32-bit atomic_t.
 * @op:	    suffix of the generated function name (add, sub, and, or, xor)
 * @c_op:   C compound assignment for the interrupts-off fallback (+=, ...)
 * @asm_op: MIPS instruction performing the update (addu, subu, and, ...)
 *
 * Three alternatives, selected by CPU features:
 *  - R10000_LLSC_WAR: LL/SC retry loop kept entirely in assembly, using
 *    the branch-likely beqzl workaround needed by early R10000 silicon
 *    (hence ".set arch=r4000").
 *  - kernel_uses_llsc: LL/SC with the retry loop driven from C; sc leaves
 *    its success flag in temp, 0 meaning the store failed and we retry.
 *  - neither: plain C update under raw_local_irq_save() for CPUs without
 *    LL/SC.
 *
 * These are relaxed operations: no memory barriers are implied.
 */
#define ATOMIC_OP(op, c_op, asm_op)					      \
static __inline__ void atomic_##op(int i, atomic_t * v)		      \
{									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000			\n"	      \
		"1:	ll	%0, %1	# atomic_" #op "	\n"	      \
		"	" #asm_op " %0, %2			\n"	      \
		"	sc	%0, %1				\n"	      \
		"	beqzl	%0, 1b				\n"	      \
		"	.set	mips0				\n"	      \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		int temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"	\n"	      \
			"	ll	%0, %1	# atomic_" #op "\n"	      \
			"	" #asm_op " %0, %2		\n"	      \
			"	sc	%0, %1			\n"	      \
			"	.set	mips0			\n"	      \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i));					      \
		} while (unlikely(!temp));				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
}
80 | ||
/*
 * ATOMIC_OP_RETURN(op, c_op, asm_op) - generate
 * atomic_##op##_return_relaxed(), which updates the counter and returns
 * the NEW value.
 *
 * In the R10000 workaround path the trailing #asm_op recomputes the
 * updated value after the sc (sc leaves only a 0/1 success flag in %0).
 * In the plain LL/SC path the new value is recomputed in C after the
 * retry loop exits ("result = temp; result c_op i").
 *
 * _relaxed: no barriers are implied; <linux/atomic.h> derives the
 * acquire/release/fully-ordered variants from this one.
 */
#define ATOMIC_OP_RETURN(op, c_op, asm_op)				      \
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v)      \
{									      \
	int result;							      \
									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000			\n"	      \
		"1:	ll	%1, %2	# atomic_" #op "_return	\n"	      \
		"	" #asm_op " %0, %1, %3			\n"	      \
		"	sc	%0, %2				\n"	      \
		"	beqzl	%0, 1b				\n"	      \
		"	" #asm_op " %0, %1, %3			\n"	      \
		"	.set	mips0				\n"	      \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		int temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"	\n"	      \
			"	ll	%1, %2	# atomic_" #op "_return	\n" \
			"	" #asm_op " %0, %1, %3		\n"	      \
			"	sc	%0, %2			\n"	      \
			"	.set	mips0			\n"	      \
			: "=&r" (result), "=&r" (temp),			      \
			  "+" GCC_OFF_SMALL_ASM() (v->counter)		      \
			: "Ir" (i));					      \
		} while (unlikely(!result));				      \
									      \
		result = temp; result c_op i;				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		result c_op i;						      \
		v->counter = result;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	return result;							      \
}
128 | ||
/*
 * ATOMIC_FETCH_OP(op, c_op, asm_op) - generate
 * atomic_fetch_##op##_relaxed(), which updates the counter and returns
 * the OLD value.
 *
 * The loaded (pre-update) value lives in temp; "move %0, %1" (R10000
 * path) or "result = temp" (plain LL/SC path) copies it out once the sc
 * has succeeded.  The fallback variant reads the old value in C before
 * applying c_op.
 *
 * _relaxed: no memory barriers are implied.
 */
#define ATOMIC_FETCH_OP(op, c_op, asm_op)				      \
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v)	      \
{									      \
	int result;							      \
									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000			\n"	      \
		"1:	ll	%1, %2	# atomic_fetch_" #op "	\n"	      \
		"	" #asm_op " %0, %1, %3			\n"	      \
		"	sc	%0, %2				\n"	      \
		"	beqzl	%0, 1b				\n"	      \
		"	move	%0, %1				\n"	      \
		"	.set	mips0				\n"	      \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		int temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"	\n"	      \
			"	ll	%1, %2	# atomic_fetch_" #op "	\n" \
			"	" #asm_op " %0, %1, %3		\n"	      \
			"	sc	%0, %2			\n"	      \
			"	.set	mips0			\n"	      \
			: "=&r" (result), "=&r" (temp),			      \
			  "+" GCC_OFF_SMALL_ASM() (v->counter)		      \
			: "Ir" (i));					      \
		} while (unlikely(!result));				      \
									      \
		result = temp;						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	return result;							      \
}
175 | ||
ddb3108e MR |
/*
 * Instantiate the 32-bit atomic operations.  add/sub get all three forms:
 * the void op, the new-value-returning op and the old-value fetch op.
 */
#define ATOMIC_OPS(op, c_op, asm_op)					      \
	ATOMIC_OP(op, c_op, asm_op)					      \
	ATOMIC_OP_RETURN(op, c_op, asm_op)				      \
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

/*
 * Defining each _relaxed name to itself signals to <linux/atomic.h> that
 * only the relaxed forms are provided here; the generic layer then builds
 * the acquire/release/fully-ordered variants from them.
 */
#define atomic_add_return_relaxed	atomic_add_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub_relaxed

/* Bitwise ops have no *_return form: only the void op and fetch_<op>. */
#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op)					      \
	ATOMIC_OP(op, c_op, asm_op)					      \
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

#define atomic_fetch_and_relaxed	atomic_fetch_and_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor_relaxed

/* The generator macros are private to this header. */
#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
1da177e4 LT |
206 | |
207 | /* | |
f10d14dd AG |
208 | * atomic_sub_if_positive - conditionally subtract integer from atomic variable |
209 | * @i: integer value to subtract | |
1da177e4 LT |
210 | * @v: pointer of type atomic_t |
211 | * | |
f10d14dd AG |
212 | * Atomically test @v and subtract @i if @v is greater or equal than @i. |
213 | * The function returns the old value of @v minus @i. | |
1da177e4 LT |
214 | */ |
215 | static __inline__ int atomic_sub_if_positive(int i, atomic_t * v) | |
216 | { | |
915ec1e2 | 217 | int result; |
1da177e4 | 218 | |
f252ffd5 | 219 | smp_mb__before_llsc(); |
0004a9df | 220 | |
b791d119 | 221 | if (kernel_uses_llsc && R10000_LLSC_WAR) { |
915ec1e2 | 222 | int temp; |
1da177e4 LT |
223 | |
224 | __asm__ __volatile__( | |
a809d460 | 225 | " .set arch=r4000 \n" |
1da177e4 LT |
226 | "1: ll %1, %2 # atomic_sub_if_positive\n" |
227 | " subu %0, %1, %3 \n" | |
228 | " bltz %0, 1f \n" | |
229 | " sc %0, %2 \n" | |
92f22c18 | 230 | " .set noreorder \n" |
1da177e4 | 231 | " beqzl %0, 1b \n" |
92f22c18 RB |
232 | " subu %0, %1, %3 \n" |
233 | " .set reorder \n" | |
1da177e4 | 234 | "1: \n" |
aac8aa77 | 235 | " .set mips0 \n" |
b0984c43 | 236 | : "=&r" (result), "=&r" (temp), |
94bfb75a MC |
237 | "+" GCC_OFF_SMALL_ASM() (v->counter) |
238 | : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) | |
1da177e4 | 239 | : "memory"); |
b791d119 | 240 | } else if (kernel_uses_llsc) { |
915ec1e2 | 241 | int temp; |
1da177e4 LT |
242 | |
243 | __asm__ __volatile__( | |
0038df22 | 244 | " .set "MIPS_ISA_LEVEL" \n" |
1da177e4 LT |
245 | "1: ll %1, %2 # atomic_sub_if_positive\n" |
246 | " subu %0, %1, %3 \n" | |
247 | " bltz %0, 1f \n" | |
248 | " sc %0, %2 \n" | |
92f22c18 | 249 | " .set noreorder \n" |
7837314d | 250 | " beqz %0, 1b \n" |
92f22c18 RB |
251 | " subu %0, %1, %3 \n" |
252 | " .set reorder \n" | |
50952026 | 253 | "1: \n" |
aac8aa77 | 254 | " .set mips0 \n" |
b0984c43 | 255 | : "=&r" (result), "=&r" (temp), |
94bfb75a | 256 | "+" GCC_OFF_SMALL_ASM() (v->counter) |
b4f2a17b | 257 | : "Ir" (i)); |
1da177e4 LT |
258 | } else { |
259 | unsigned long flags; | |
260 | ||
49edd098 | 261 | raw_local_irq_save(flags); |
1da177e4 LT |
262 | result = v->counter; |
263 | result -= i; | |
264 | if (result >= 0) | |
265 | v->counter = result; | |
49edd098 | 266 | raw_local_irq_restore(flags); |
1da177e4 LT |
267 | } |
268 | ||
17099b11 | 269 | smp_llsc_mb(); |
0004a9df | 270 | |
1da177e4 LT |
271 | return result; |
272 | } | |
273 | ||
e12f644b MD |
/* Exchange helpers, built on the generic cmpxchg()/xchg() from <asm/cmpxchg.h>. */
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
4a6dae6d | 276 | |
8426e1f6 | 277 | /** |
f24219b4 | 278 | * __atomic_add_unless - add unless the number is a given value |
8426e1f6 NP |
279 | * @v: pointer of type atomic_t |
280 | * @a: the amount to add to v... | |
281 | * @u: ...unless v is equal to u. | |
282 | * | |
283 | * Atomically adds @a to @v, so long as it was not @u. | |
f24219b4 | 284 | * Returns the old value of @v. |
8426e1f6 | 285 | */ |
f24219b4 | 286 | static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u) |
2856f5e3 MD |
287 | { |
288 | int c, old; | |
289 | c = atomic_read(v); | |
290 | for (;;) { | |
291 | if (unlikely(c == (u))) | |
292 | break; | |
293 | old = atomic_cmpxchg((v), c, c + (a)); | |
294 | if (likely(old == c)) | |
295 | break; | |
296 | c = old; | |
297 | } | |
f24219b4 | 298 | return c; |
2856f5e3 | 299 | } |
8426e1f6 | 300 | |
21a151d8 RB |
301 | #define atomic_dec_return(v) atomic_sub_return(1, (v)) |
302 | #define atomic_inc_return(v) atomic_add_return(1, (v)) | |
1da177e4 LT |
303 | |
304 | /* | |
305 | * atomic_sub_and_test - subtract value from variable and test result | |
306 | * @i: integer value to subtract | |
307 | * @v: pointer of type atomic_t | |
308 | * | |
309 | * Atomically subtracts @i from @v and returns | |
310 | * true if the result is zero, or false for all | |
311 | * other cases. | |
312 | */ | |
21a151d8 | 313 | #define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0) |
1da177e4 LT |
314 | |
315 | /* | |
316 | * atomic_inc_and_test - increment and test | |
317 | * @v: pointer of type atomic_t | |
318 | * | |
319 | * Atomically increments @v by 1 | |
320 | * and returns true if the result is zero, or false for all | |
321 | * other cases. | |
322 | */ | |
323 | #define atomic_inc_and_test(v) (atomic_inc_return(v) == 0) | |
324 | ||
325 | /* | |
326 | * atomic_dec_and_test - decrement by 1 and test | |
327 | * @v: pointer of type atomic_t | |
328 | * | |
329 | * Atomically decrements @v by 1 and | |
330 | * returns true if the result is 0, or false for all other | |
331 | * cases. | |
332 | */ | |
333 | #define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0) | |
334 | ||
335 | /* | |
336 | * atomic_dec_if_positive - decrement by 1 if old value positive | |
337 | * @v: pointer of type atomic_t | |
338 | */ | |
339 | #define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v) | |
340 | ||
341 | /* | |
342 | * atomic_inc - increment atomic variable | |
343 | * @v: pointer of type atomic_t | |
344 | * | |
345 | * Atomically increments @v by 1. | |
346 | */ | |
21a151d8 | 347 | #define atomic_inc(v) atomic_add(1, (v)) |
1da177e4 LT |
348 | |
349 | /* | |
350 | * atomic_dec - decrement and test | |
351 | * @v: pointer of type atomic_t | |
352 | * | |
353 | * Atomically decrements @v by 1. | |
354 | */ | |
21a151d8 | 355 | #define atomic_dec(v) atomic_sub(1, (v)) |
1da177e4 LT |
356 | |
357 | /* | |
358 | * atomic_add_negative - add and test if negative | |
359 | * @v: pointer of type atomic_t | |
360 | * @i: integer value to add | |
361 | * | |
362 | * Atomically adds @i to @v and returns true | |
363 | * if the result is negative, or false when | |
364 | * result is greater than or equal to zero. | |
365 | */ | |
21a151d8 | 366 | #define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0) |
1da177e4 | 367 | |
875d43e7 | 368 | #ifdef CONFIG_64BIT |
1da177e4 | 369 | |
1da177e4 LT |
370 | #define ATOMIC64_INIT(i) { (i) } |
371 | ||
372 | /* | |
373 | * atomic64_read - read atomic variable | |
374 | * @v: pointer of type atomic64_t | |
375 | * | |
376 | */ | |
62e8a325 | 377 | #define atomic64_read(v) READ_ONCE((v)->counter) |
1da177e4 LT |
378 | |
379 | /* | |
380 | * atomic64_set - set atomic variable | |
381 | * @v: pointer of type atomic64_t | |
382 | * @i: required value | |
383 | */ | |
62e8a325 | 384 | #define atomic64_set(v, i) WRITE_ONCE((v)->counter, (i)) |
1da177e4 | 385 | |
ddb3108e MR |
/*
 * ATOMIC64_OP(op, c_op, asm_op) - generate atomic64_##op(), a void
 * read-modify-write operation on a 64-bit atomic64_t.
 * @op:	    suffix of the generated function name (add, sub, and, or, xor)
 * @c_op:   C compound assignment for the interrupts-off fallback (+=, ...)
 * @asm_op: MIPS doubleword instruction performing the update (daddu, ...)
 *
 * Mirrors ATOMIC_OP() but uses the 64-bit LL/SC pair (lld/scd).  Same
 * three code paths: R10000 beqzl workaround, C-driven LL/SC retry loop,
 * and a raw_local_irq_save() fallback.  No barriers implied.
 */
#define ATOMIC64_OP(op, c_op, asm_op)					      \
static __inline__ void atomic64_##op(long i, atomic64_t * v)		      \
{									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000			\n"	      \
		"1:	lld	%0, %1	# atomic64_" #op "	\n"	      \
		"	" #asm_op " %0, %2			\n"	      \
		"	scd	%0, %1				\n"	      \
		"	beqzl	%0, 1b				\n"	      \
		"	.set	mips0				\n"	      \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		long temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"	\n"	      \
			"	lld	%0, %1	# atomic64_" #op "\n"	      \
			"	" #asm_op " %0, %2		\n"	      \
			"	scd	%0, %1			\n"	      \
			"	.set	mips0			\n"	      \
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
			: "Ir" (i));					      \
		} while (unlikely(!temp));				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
}
422 | ||
/*
 * ATOMIC64_OP_RETURN(op, c_op, asm_op) - generate
 * atomic64_##op##_return_relaxed(), which updates the counter and returns
 * the NEW value.  64-bit counterpart of ATOMIC_OP_RETURN().
 *
 * NOTE(review): the plain LL/SC branch previously used the old-style
 * "=" output constraint with a duplicate input operand and a "memory"
 * clobber; it has been converted to the "+" read-write constraint to
 * match ATOMIC_OP_RETURN() and the other operand lists in this file.
 */
#define ATOMIC64_OP_RETURN(op, c_op, asm_op)				      \
static __inline__ long atomic64_##op##_return_relaxed(long i, atomic64_t * v) \
{									      \
	long result;							      \
									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000			\n"	      \
		"1:	lld	%1, %2	# atomic64_" #op "_return\n"	      \
		"	" #asm_op " %0, %1, %3			\n"	      \
		"	scd	%0, %2				\n"	      \
		"	beqzl	%0, 1b				\n"	      \
		"	" #asm_op " %0, %1, %3			\n"	      \
		"	.set	mips0				\n"	      \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		long temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"	\n"	      \
			"	lld	%1, %2	# atomic64_" #op "_return\n" \
			"	" #asm_op " %0, %1, %3		\n"	      \
			"	scd	%0, %2			\n"	      \
			"	.set	mips0			\n"	      \
			: "=&r" (result), "=&r" (temp),			      \
			  "+" GCC_OFF_SMALL_ASM() (v->counter)		      \
			: "Ir" (i));					      \
		} while (unlikely(!result));				      \
									      \
		result = temp; result c_op i;				      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		result c_op i;						      \
		v->counter = result;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	return result;							      \
}
471 | ||
/*
 * ATOMIC64_FETCH_OP(op, c_op, asm_op) - generate
 * atomic64_fetch_##op##_relaxed(), which updates the counter and returns
 * the OLD value.  64-bit counterpart of ATOMIC_FETCH_OP().
 *
 * NOTE(review): the plain LL/SC branch previously used the old-style
 * "=" output constraint with a duplicate input operand and a "memory"
 * clobber; converted to the "+" read-write constraint for consistency
 * with ATOMIC_FETCH_OP() and the rest of this file.
 */
#define ATOMIC64_FETCH_OP(op, c_op, asm_op)				      \
static __inline__ long atomic64_fetch_##op##_relaxed(long i, atomic64_t * v) \
{									      \
	long result;							      \
									      \
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	arch=r4000			\n"	      \
		"1:	lld	%1, %2	# atomic64_fetch_" #op "\n"	      \
		"	" #asm_op " %0, %1, %3			\n"	      \
		"	scd	%0, %2				\n"	      \
		"	beqzl	%0, 1b				\n"	      \
		"	move	%0, %1				\n"	      \
		"	.set	mips0				\n"	      \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else if (kernel_uses_llsc) {					      \
		long temp;						      \
									      \
		do {							      \
			__asm__ __volatile__(				      \
			"	.set	"MIPS_ISA_LEVEL"	\n"	      \
			"	lld	%1, %2	# atomic64_fetch_" #op "\n"  \
			"	" #asm_op " %0, %1, %3		\n"	      \
			"	scd	%0, %2			\n"	      \
			"	.set	mips0			\n"	      \
			: "=&r" (result), "=&r" (temp),			      \
			  "+" GCC_OFF_SMALL_ASM() (v->counter)		      \
			: "Ir" (i));					      \
		} while (unlikely(!result));				      \
									      \
		result = temp;						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	return result;							      \
}
519 | ||
ddb3108e MR |
/*
 * Instantiate the 64-bit atomic operations; structure mirrors the 32-bit
 * ATOMIC_OPS block above.
 */
#define ATOMIC64_OPS(op, c_op, asm_op)					      \
	ATOMIC64_OP(op, c_op, asm_op)					      \
	ATOMIC64_OP_RETURN(op, c_op, asm_op)				      \
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

/*
 * Self-defines tell <linux/atomic.h> that only the relaxed forms exist;
 * the ordered variants are synthesized generically.
 */
#define atomic64_add_return_relaxed	atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub_relaxed

/* Bitwise ops: void op and fetch_<op> only, no *_return form. */
#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op, asm_op)					      \
	ATOMIC64_OP(op, c_op, asm_op)					      \
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)

#define atomic64_fetch_and_relaxed	atomic64_fetch_and_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor_relaxed

/* The generator macros are private to this header. */
#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
1da177e4 LT |
550 | |
551 | /* | |
ddb3108e MR |
552 | * atomic64_sub_if_positive - conditionally subtract integer from atomic |
553 | * variable | |
f10d14dd | 554 | * @i: integer value to subtract |
1da177e4 LT |
555 | * @v: pointer of type atomic64_t |
556 | * | |
f10d14dd AG |
557 | * Atomically test @v and subtract @i if @v is greater or equal than @i. |
558 | * The function returns the old value of @v minus @i. | |
1da177e4 LT |
559 | */ |
560 | static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v) | |
561 | { | |
915ec1e2 | 562 | long result; |
1da177e4 | 563 | |
f252ffd5 | 564 | smp_mb__before_llsc(); |
0004a9df | 565 | |
b791d119 | 566 | if (kernel_uses_llsc && R10000_LLSC_WAR) { |
915ec1e2 | 567 | long temp; |
1da177e4 LT |
568 | |
569 | __asm__ __volatile__( | |
a809d460 | 570 | " .set arch=r4000 \n" |
1da177e4 LT |
571 | "1: lld %1, %2 # atomic64_sub_if_positive\n" |
572 | " dsubu %0, %1, %3 \n" | |
573 | " bltz %0, 1f \n" | |
574 | " scd %0, %2 \n" | |
92f22c18 | 575 | " .set noreorder \n" |
1da177e4 | 576 | " beqzl %0, 1b \n" |
92f22c18 RB |
577 | " dsubu %0, %1, %3 \n" |
578 | " .set reorder \n" | |
1da177e4 | 579 | "1: \n" |
aac8aa77 | 580 | " .set mips0 \n" |
b0984c43 | 581 | : "=&r" (result), "=&r" (temp), |
94bfb75a MC |
582 | "=" GCC_OFF_SMALL_ASM() (v->counter) |
583 | : "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) | |
1da177e4 | 584 | : "memory"); |
b791d119 | 585 | } else if (kernel_uses_llsc) { |
915ec1e2 | 586 | long temp; |
1da177e4 LT |
587 | |
588 | __asm__ __volatile__( | |
0038df22 | 589 | " .set "MIPS_ISA_LEVEL" \n" |
1da177e4 LT |
590 | "1: lld %1, %2 # atomic64_sub_if_positive\n" |
591 | " dsubu %0, %1, %3 \n" | |
592 | " bltz %0, 1f \n" | |
593 | " scd %0, %2 \n" | |
92f22c18 | 594 | " .set noreorder \n" |
7837314d | 595 | " beqz %0, 1b \n" |
92f22c18 RB |
596 | " dsubu %0, %1, %3 \n" |
597 | " .set reorder \n" | |
50952026 | 598 | "1: \n" |
aac8aa77 | 599 | " .set mips0 \n" |
b0984c43 | 600 | : "=&r" (result), "=&r" (temp), |
94bfb75a | 601 | "+" GCC_OFF_SMALL_ASM() (v->counter) |
b4f2a17b | 602 | : "Ir" (i)); |
1da177e4 LT |
603 | } else { |
604 | unsigned long flags; | |
605 | ||
49edd098 | 606 | raw_local_irq_save(flags); |
1da177e4 LT |
607 | result = v->counter; |
608 | result -= i; | |
609 | if (result >= 0) | |
610 | v->counter = result; | |
49edd098 | 611 | raw_local_irq_restore(flags); |
1da177e4 LT |
612 | } |
613 | ||
17099b11 | 614 | smp_llsc_mb(); |
0004a9df | 615 | |
1da177e4 LT |
616 | return result; |
617 | } | |
618 | ||
/*
 * 64-bit exchange helpers on top of the generic cmpxchg()/xchg().  The
 * __typeof__ cast preserves the counter's long type across cmpxchg().
 */
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
622 | ||
623 | /** | |
624 | * atomic64_add_unless - add unless the number is a given value | |
625 | * @v: pointer of type atomic64_t | |
626 | * @a: the amount to add to v... | |
627 | * @u: ...unless v is equal to u. | |
628 | * | |
629 | * Atomically adds @a to @v, so long as it was not @u. | |
f25319d2 | 630 | * Returns true iff @v was not @u. |
e12f644b | 631 | */ |
2856f5e3 MD |
632 | static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u) |
633 | { | |
634 | long c, old; | |
635 | c = atomic64_read(v); | |
636 | for (;;) { | |
637 | if (unlikely(c == (u))) | |
638 | break; | |
639 | old = atomic64_cmpxchg((v), c, c + (a)); | |
640 | if (likely(old == c)) | |
641 | break; | |
642 | c = old; | |
643 | } | |
644 | return c != (u); | |
645 | } | |
646 | ||
e12f644b MD |
647 | #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0) |
648 | ||
21a151d8 RB |
649 | #define atomic64_dec_return(v) atomic64_sub_return(1, (v)) |
650 | #define atomic64_inc_return(v) atomic64_add_return(1, (v)) | |
1da177e4 LT |
651 | |
652 | /* | |
653 | * atomic64_sub_and_test - subtract value from variable and test result | |
654 | * @i: integer value to subtract | |
655 | * @v: pointer of type atomic64_t | |
656 | * | |
657 | * Atomically subtracts @i from @v and returns | |
658 | * true if the result is zero, or false for all | |
659 | * other cases. | |
660 | */ | |
21a151d8 | 661 | #define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0) |
1da177e4 LT |
662 | |
663 | /* | |
664 | * atomic64_inc_and_test - increment and test | |
665 | * @v: pointer of type atomic64_t | |
666 | * | |
667 | * Atomically increments @v by 1 | |
668 | * and returns true if the result is zero, or false for all | |
669 | * other cases. | |
670 | */ | |
671 | #define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0) | |
672 | ||
673 | /* | |
674 | * atomic64_dec_and_test - decrement by 1 and test | |
675 | * @v: pointer of type atomic64_t | |
676 | * | |
677 | * Atomically decrements @v by 1 and | |
678 | * returns true if the result is 0, or false for all other | |
679 | * cases. | |
680 | */ | |
681 | #define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0) | |
682 | ||
683 | /* | |
684 | * atomic64_dec_if_positive - decrement by 1 if old value positive | |
685 | * @v: pointer of type atomic64_t | |
686 | */ | |
687 | #define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v) | |
688 | ||
689 | /* | |
690 | * atomic64_inc - increment atomic variable | |
691 | * @v: pointer of type atomic64_t | |
692 | * | |
693 | * Atomically increments @v by 1. | |
694 | */ | |
21a151d8 | 695 | #define atomic64_inc(v) atomic64_add(1, (v)) |
1da177e4 LT |
696 | |
697 | /* | |
698 | * atomic64_dec - decrement and test | |
699 | * @v: pointer of type atomic64_t | |
700 | * | |
701 | * Atomically decrements @v by 1. | |
702 | */ | |
21a151d8 | 703 | #define atomic64_dec(v) atomic64_sub(1, (v)) |
1da177e4 LT |
704 | |
705 | /* | |
706 | * atomic64_add_negative - add and test if negative | |
707 | * @v: pointer of type atomic64_t | |
708 | * @i: integer value to add | |
709 | * | |
710 | * Atomically adds @i to @v and returns true | |
711 | * if the result is negative, or false when | |
712 | * result is greater than or equal to zero. | |
713 | */ | |
21a151d8 | 714 | #define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0) |
1da177e4 | 715 | |
875d43e7 | 716 | #endif /* CONFIG_64BIT */ |
1da177e4 | 717 | |
1da177e4 | 718 | #endif /* _ASM_ATOMIC_H */ |