 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)	{ (i) }
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 * Atomically reads the value of @v.
#define atomic_read(v)		ACCESS_ONCE((v)->counter)

 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * Atomically sets the value of @v to @i.
#define atomic_set(v, i)	((v)->counter = (i))
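
/*
 * Usage sketch (added illustration, not part of the original header): a
 * counter declared with ATOMIC_INIT() and driven through the plain
 * accessors.  Note that atomic_read()/atomic_set() compile to ordinary
 * loads and stores and imply no memory barriers.
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 5);
 *	pr_info("users: %d\n", atomic_read(&nr_users));
 */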
#define ATOMIC_OP(op, c_op, asm_op)					\
static __inline__ void atomic_##op(int i, atomic_t * v)		\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		__asm__ __volatile__(					\
		"	.set	arch=r4000			\n"	\
		"1:	ll	%0, %1		# atomic_" #op "\n"	\
		"	" #asm_op " %0, %2			\n"	\
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	\
	} else if (kernel_uses_llsc) {					\
		__asm__ __volatile__(					\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"	ll	%0, %1		# atomic_" #op "\n"	\
		"	" #asm_op " %0, %2			\n"	\
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	\
		} while (unlikely(!temp));				\
		unsigned long flags;					\
		raw_local_irq_save(flags);				\
		raw_local_irq_restore(flags);				\
#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
static __inline__ int atomic_##op##_return(int i, atomic_t * v)	\
	smp_mb__before_llsc();						\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		__asm__ __volatile__(					\
		"	.set	arch=r4000			\n"	\
		"1:	ll	%1, %2	# atomic_" #op "_return	\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
	} else if (kernel_uses_llsc) {					\
		__asm__ __volatile__(					\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"	ll	%1, %2	# atomic_" #op "_return	\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
		} while (unlikely(!result));				\
		result = temp; result c_op i;				\
		unsigned long flags;					\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		v->counter = result;					\
		raw_local_irq_restore(flags);				\
#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_OP_RETURN(op, c_op, asm_op)
ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)
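
/*
 * Each ATOMIC_OPS() expansion above generates a void atomic_<op>() and an
 * int atomic_<op>_return() flavour, so the two lines yield atomic_add(),
 * atomic_sub(), atomic_add_return() and atomic_sub_return().  Usage sketch
 * (added illustration; release_resources() is a made-up example function):
 *
 *	atomic_add(3, &nr_users);			// no return value
 *	if (atomic_sub_return(3, &nr_users) == 0)	// returns new value
 *		release_resources();
 *
 * The _return variants are bracketed by memory barriers (smp_mb__before_llsc()
 * is visible above); the plain void variants are not.
 */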
#undef ATOMIC_OP_RETURN
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3			\n"
		"	subu	%0, %1, %3			\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
	} else if (kernel_uses_llsc) {
		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"		\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3			\n"
		"	subu	%0, %1, %3			\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
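
/*
 * Usage sketch for atomic_sub_if_positive() (added illustration;
 * take_credit() is a made-up name): a credit counter that never goes
 * negative.  The return value is the old count minus @i, so a negative
 * result means nothing was subtracted.
 *
 *	static atomic_t credits = ATOMIC_INIT(16);
 *
 *	static bool take_credit(void)
 *	{
 *		return atomic_sub_if_positive(1, &credits) >= 0;
 *	}
 */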
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
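
/*
 * atomic_cmpxchg()/atomic_xchg() are the building blocks for open-coded
 * compare-and-swap loops.  Sketch of such a loop (added illustration;
 * atomic_add_clamped() is a made-up name):
 *
 *	static int atomic_add_clamped(atomic_t *v, int a, int max)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			new = min(old + a, max);
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *
 *		return new;
 *	}
 */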
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
		if (unlikely(c == (u)))
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all other cases.
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all other cases.
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other cases.
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 * Atomically increments @v by 1.
#define atomic_inc(v) atomic_add(1, (v))
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 * Atomically decrements @v by 1.
#define atomic_dec(v) atomic_sub(1, (v))
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
#define atomic64_read(v)	ACCESS_ONCE((v)->counter)

 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
#define atomic64_set(v, i)	((v)->counter = (i))
#define ATOMIC64_OP(op, c_op, asm_op)					\
static __inline__ void atomic64_##op(long i, atomic64_t * v)		\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		__asm__ __volatile__(					\
		"	.set	arch=r4000			\n"	\
		"1:	lld	%0, %1		# atomic64_" #op "\n"	\
		"	" #asm_op " %0, %2			\n"	\
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	\
	} else if (kernel_uses_llsc) {					\
		__asm__ __volatile__(					\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"	lld	%0, %1		# atomic64_" #op "\n"	\
		"	" #asm_op " %0, %2			\n"	\
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	\
		} while (unlikely(!temp));				\
		unsigned long flags;					\
		raw_local_irq_save(flags);				\
		raw_local_irq_restore(flags);				\
#define ATOMIC64_OP_RETURN(op, c_op, asm_op)				\
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)	\
	smp_mb__before_llsc();						\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		__asm__ __volatile__(					\
		"	.set	arch=r4000			\n"	\
		"1:	lld	%1, %2	# atomic64_" #op "_return\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			\
	} else if (kernel_uses_llsc) {					\
		__asm__ __volatile__(					\
		"	.set	"MIPS_ISA_LEVEL"		\n"	\
		"	lld	%1, %2	# atomic64_" #op "_return\n"	\
		"	" #asm_op " %0, %1, %3			\n"	\
		: "=&r" (result), "=&r" (temp),				\
		  "=" GCC_OFF_SMALL_ASM() (v->counter)			\
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)		\
		} while (unlikely(!result));				\
		result = temp; result c_op i;				\
		unsigned long flags;					\
		raw_local_irq_save(flags);				\
		result = v->counter;					\
		v->counter = result;					\
		raw_local_irq_restore(flags);				\
#define ATOMIC64_OPS(op, c_op, asm_op)					\
	ATOMIC64_OP(op, c_op, asm_op)					\
	ATOMIC64_OP_RETURN(op, c_op, asm_op)
ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)
#undef ATOMIC64_OP_RETURN
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3			\n"
		"	dsubu	%0, %1, %3			\n"
		: "=&r" (result), "=&r" (temp),
		  "=" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
	} else if (kernel_uses_llsc) {
		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"		\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3			\n"
		"	dsubu	%0, %1, %3			\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the addition was performed, zero otherwise.
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
	c = atomic64_read(v);
		if (unlikely(c == (u)))
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
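
/*
 * Usage sketch for atomic64_inc_not_zero() (added illustration; the same
 * "take a reference only while the count is non-zero" pattern as the 32-bit
 * get_ref() example above, but for 64-bit counters; use_object() is a
 * made-up name):
 *
 *	static atomic64_t obj_refs = ATOMIC64_INIT(1);
 *
 *	if (atomic64_inc_not_zero(&obj_refs))
 *		use_object();
 */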
#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all other cases.
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all other cases.
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other cases.
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 * Atomically increments @v by 1.
#define atomic64_inc(v) atomic64_add(1, (v))
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 * Atomically decrements @v by 1.
#define atomic64_dec(v) atomic64_sub(1, (v))
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */