/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	WRITE_ONCE((v)->counter, (i))
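/*
 * Illustrative usage sketch (not part of this header): how a counter is
 * typically declared and accessed with the accessors above.  The names
 * "pkt_count" and "init_network_counters" are hypothetical.
 *
 *	static atomic_t pkt_count = ATOMIC_INIT(0);
 *
 *	static void init_network_counters(void)
 *	{
 *		atomic_set(&pkt_count, 0);	// plain store, no barrier
 *		pr_info("packets: %d\n", atomic_read(&pkt_count));
 *	}
 *
 * READ_ONCE()/WRITE_ONCE() only guarantee that the access happens exactly
 * once and is not torn; they impose no memory ordering on their own.
 */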
#define ATOMIC_OP(op, c_op, asm_op) \
static __inline__ void atomic_##op(int i, atomic_t * v) \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		__asm__ __volatile__( \
		"	.set	arch=r4000			\n" \
		"1:	ll	%0, %1		# atomic_" #op "	\n" \
		"	" #asm_op " %0, %2			\n" \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
	} else if (kernel_uses_llsc) { \
		__asm__ __volatile__( \
		"	.set	"MIPS_ISA_LEVEL"		\n" \
		"	ll	%0, %1		# atomic_" #op "\n" \
		"	" #asm_op " %0, %2			\n" \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
		} while (unlikely(!temp)); \
		unsigned long flags; \
		raw_local_irq_save(flags); \
		raw_local_irq_restore(flags); \
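/*
 * A rough C-level sketch of what the LL/SC sequence generated by ATOMIC_OP()
 * amounts to, assuming the usual load-linked/store-conditional semantics
 * (the store fails and the loop retries if the counter was modified between
 * the ll and the sc).  "store_conditional" is a hypothetical helper used
 * only for illustration:
 *
 *	do {
 *		temp = v->counter;	// ll  %0, %1
 *		temp c_op i;		// e.g. temp += i
 *	} while (!store_conditional(&v->counter, temp));	// sc %0, %1
 *
 * The !kernel_uses_llsc fallback simply disables interrupts around a plain
 * read-modify-write, which is sufficient on uniprocessor systems.
 */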
#define ATOMIC_OP_RETURN(op, c_op, asm_op) \
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v) \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		__asm__ __volatile__( \
		"	.set	arch=r4000			\n" \
		"1:	ll	%1, %2		# atomic_" #op "_return	\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
	} else if (kernel_uses_llsc) { \
		__asm__ __volatile__( \
		"	.set	"MIPS_ISA_LEVEL"		\n" \
		"	ll	%1, %2		# atomic_" #op "_return	\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
		} while (unlikely(!result)); \
		result = temp; result c_op i; \
		unsigned long flags; \
		raw_local_irq_save(flags); \
		result = v->counter; \
		v->counter = result; \
		raw_local_irq_restore(flags); \
#define ATOMIC_FETCH_OP(op, c_op, asm_op) \
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v) \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		__asm__ __volatile__( \
		"	.set	arch=r4000			\n" \
		"1:	ll	%1, %2		# atomic_fetch_" #op "	\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
	} else if (kernel_uses_llsc) { \
		__asm__ __volatile__( \
		"	.set	"MIPS_ISA_LEVEL"		\n" \
		"	ll	%1, %2		# atomic_fetch_" #op "	\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
		} while (unlikely(!result)); \
		unsigned long flags; \
		raw_local_irq_save(flags); \
		result = v->counter; \
		raw_local_irq_restore(flags); \
#define ATOMIC_OPS(op, c_op, asm_op) \
	ATOMIC_OP(op, c_op, asm_op) \
	ATOMIC_OP_RETURN(op, c_op, asm_op) \
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)
#define atomic_add_return_relaxed	atomic_add_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub_relaxed
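/*
 * Illustrative sketch of how the three flavours generated above differ.
 * Each line assumes the counter holds 5 beforehand (hypothetical values,
 * not part of this header):
 *
 *	atomic_add(3, &v);			// no return value
 *	new = atomic_add_return(3, &v);		// returns the new value, 8
 *	old = atomic_fetch_add(3, &v);		// returns the old value, 5
 *
 * Only the _relaxed forms are defined here; the generic atomic layer is
 * expected to build the fully ordered atomic_add_return() etc. from them
 * by adding the required barriers.
 */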
#define ATOMIC_OPS(op, c_op, asm_op) \
	ATOMIC_OP(op, c_op, asm_op) \
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

#define atomic_fetch_and_relaxed	atomic_fetch_and_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor_relaxed
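/*
 * Illustrative use of a fetch variant (hypothetical flag bit and variable
 * names): atomically set a bit and learn whether it was already set.
 *
 *	#define MY_FLAG_BUSY	0x1
 *
 *	if (atomic_fetch_or(MY_FLAG_BUSY, &state) & MY_FLAG_BUSY)
 *		return -EBUSY;		// someone else set it first
 *
 * The plain atomic_or() generated above sets the bit but discards the old
 * value, so it cannot be used for this "test and set" pattern.
 */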
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3			\n"
		"	 subu	%0, %1, %3			\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
	} else if (kernel_uses_llsc) {
		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"		\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3			\n"
		"	 subu	%0, %1, %3			\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
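/*
 * Illustrative semantics (hypothetical counter name): atomic_sub_if_positive()
 * only commits the subtraction when the result would be non-negative, and it
 * always returns old - i, so a negative return means nothing was changed.
 *
 *	atomic_set(&sem_count, 1);
 *	atomic_sub_if_positive(1, &sem_count);	// returns 0, counter now 0
 *	atomic_sub_if_positive(1, &sem_count);	// returns -1, counter stays 0
 *
 * This is what makes it usable as a semaphore-style "try to take one"
 * primitive via atomic_dec_if_positive() below.
 */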
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
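/*
 * Illustrative compare-and-swap loop built on atomic_cmpxchg() (hypothetical
 * helper, not part of this header): atomically double the counter.
 *
 *	static inline int atomic_double(atomic_t *v)
 *	{
 *		int old = atomic_read(v);
 *
 *		for (;;) {
 *			int prev = atomic_cmpxchg(v, old, old * 2);
 *			if (prev == old)
 *				return old * 2;	// our update won
 *			old = prev;		// lost the race, retry
 *		}
 *	}
 *
 * __atomic_add_unless() below follows exactly this retry pattern.
 */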
/*
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
		if (unlikely(c == (u)))
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
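/*
 * Illustrative caller (hypothetical names): the generic atomic_add_unless()
 * and atomic_inc_not_zero() wrappers are built on this, e.g. taking a
 * reference only while the object is still live:
 *
 *	if (__atomic_add_unless(&obj->refs, 1, 0) == 0)
 *		return NULL;	// refcount already hit zero, object is dying
 *
 * Because the old value is returned, comparing it against @u tells the
 * caller whether the addition actually happened.
 */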
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
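/*
 * Illustrative reference-count release (hypothetical structure and free
 * function): only the caller that drops the count to zero tears the object
 * down, which is the canonical use of atomic_dec_and_test().
 *
 *	void my_obj_put(struct my_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refs))
 *			kfree(obj);
 *	}
 */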
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	WRITE_ONCE((v)->counter, (i))
#define ATOMIC64_OP(op, c_op, asm_op) \
static __inline__ void atomic64_##op(long i, atomic64_t * v) \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		__asm__ __volatile__( \
		"	.set	arch=r4000			\n" \
		"1:	lld	%0, %1		# atomic64_" #op "	\n" \
		"	" #asm_op " %0, %2			\n" \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
	} else if (kernel_uses_llsc) { \
		__asm__ __volatile__( \
		"	.set	"MIPS_ISA_LEVEL"		\n" \
		"	lld	%0, %1		# atomic64_" #op "\n" \
		"	" #asm_op " %0, %2			\n" \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
		} while (unlikely(!temp)); \
		unsigned long flags; \
		raw_local_irq_save(flags); \
		raw_local_irq_restore(flags); \
#define ATOMIC64_OP_RETURN(op, c_op, asm_op) \
static __inline__ long atomic64_##op##_return_relaxed(long i, atomic64_t * v) \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		__asm__ __volatile__( \
		"	.set	arch=r4000			\n" \
		"1:	lld	%1, %2		# atomic64_" #op "_return\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
	} else if (kernel_uses_llsc) { \
		__asm__ __volatile__( \
		"	.set	"MIPS_ISA_LEVEL"		\n" \
		"	lld	%1, %2		# atomic64_" #op "_return\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		: "=&r" (result), "=&r" (temp), \
		  "=" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \
		} while (unlikely(!result)); \
		result = temp; result c_op i; \
		unsigned long flags; \
		raw_local_irq_save(flags); \
		result = v->counter; \
		v->counter = result; \
		raw_local_irq_restore(flags); \
#define ATOMIC64_FETCH_OP(op, c_op, asm_op) \
static __inline__ long atomic64_fetch_##op##_relaxed(long i, atomic64_t * v) \
	if (kernel_uses_llsc && R10000_LLSC_WAR) { \
		__asm__ __volatile__( \
		"	.set	arch=r4000			\n" \
		"1:	lld	%1, %2		# atomic64_fetch_" #op "\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		: "=&r" (result), "=&r" (temp), \
		  "+" GCC_OFF_SMALL_ASM() (v->counter) \
	} else if (kernel_uses_llsc) { \
		__asm__ __volatile__( \
		"	.set	"MIPS_ISA_LEVEL"		\n" \
		"	lld	%1, %2		# atomic64_fetch_" #op "\n" \
		"	" #asm_op " %0, %1, %3			\n" \
		: "=&r" (result), "=&r" (temp), \
		  "=" GCC_OFF_SMALL_ASM() (v->counter) \
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter) \
		} while (unlikely(!result)); \
		unsigned long flags; \
		raw_local_irq_save(flags); \
		result = v->counter; \
		raw_local_irq_restore(flags); \
#define ATOMIC64_OPS(op, c_op, asm_op) \
	ATOMIC64_OP(op, c_op, asm_op) \
	ATOMIC64_OP_RETURN(op, c_op, asm_op) \
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)
#define atomic64_add_return_relaxed	atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub_relaxed

#define ATOMIC64_OPS(op, c_op, asm_op) \
	ATOMIC64_OP(op, c_op, asm_op) \
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)

#define atomic64_fetch_and_relaxed	atomic64_fetch_and_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor_relaxed

#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3			\n"
		"	 dsubu	%0, %1, %3			\n"
		: "=&r" (result), "=&r" (temp),
		  "=" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i), GCC_OFF_SMALL_ASM() (v->counter)
	} else if (kernel_uses_llsc) {
		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"		\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3			\n"
		"	 dsubu	%0, %1, %3			\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		raw_local_irq_save(flags);
		raw_local_irq_restore(flags);
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/*
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true iff @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
	c = atomic64_read(v);
		if (unlikely(c == (u)))
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
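/*
 * Illustrative lookup-and-get pattern (hypothetical tree, id and object
 * names): take a 64-bit reference only if the object has not already
 * started dying.
 *
 *	rcu_read_lock();
 *	obj = radix_tree_lookup(&my_tree, id);
 *	if (obj && !atomic64_inc_not_zero(&obj->refs))
 *		obj = NULL;	// found it, but it is already being freed
 *	rcu_read_unlock();
 */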
#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */