/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)	  { (i) }
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)
/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%0, %1		# atomic_add	\n"
			"	addu	%0, %2				\n"
			"	sc	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%0, %1		# atomic_sub	\n"
			"	subu	%0, %2				\n"
			"	sc	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}
/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%1, %2	# atomic_add_return	\n"
			"	addu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp + i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%1, %2	# atomic_sub_return	\n"
			"	subu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp - i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
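/*
 * Illustrative sketch only (hypothetical code, not part of this header):
 * atomic_sub_if_positive() can back a "try down" operation on a counting
 * semaphore-like counter.
 *
 *	static atomic_t slots = ATOMIC_INIT(4);		// four free slots
 *
 *	static inline int try_claim_slot(void)
 *	{
 *		// Subtracts 1 only if the result stays >= 0; the return value
 *		// is the old count minus 1, so negative means no slot taken.
 *		return atomic_sub_if_positive(1, &slots) >= 0;
 *	}
 */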
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
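/*
 * Illustrative sketch only: the usual compare-and-swap retry loop built on
 * atomic_cmpxchg().  atomic_max() is a hypothetical helper, not part of this
 * header; it monotonically raises the counter to at least "new".
 *
 *	static inline void atomic_max(atomic_t *v, int new)
 *	{
 *		int old = atomic_read(v);
 *
 *		while (old < new) {
 *			int seen = atomic_cmpxchg(v, old, new);
 *			if (seen == old)	// nobody raced us, done
 *				break;
 *			old = seen;		// retry against the newer value
 *		}
 *	}
 */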
/*
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
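/*
 * Note: callers normally do not use this helper directly.  In generic code
 * (outside this file) it is typically wrapped along these lines, comparing
 * the returned old value against @u to report whether the add happened:
 *
 *	static inline int atomic_add_unless(atomic_t *v, int a, int u)
 *	{
 *		return __atomic_add_unless(v, a, u) != u;
 *	}
 *	#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)
 */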
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))
/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))
/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))
/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
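/*
 * Illustrative usage sketch (hypothetical code, not part of this header):
 * a simple reference count of the kind mentioned at the top of this file.
 *
 *	static atomic_t my_refcount = ATOMIC_INIT(1);	// one initial reference
 *
 *	void my_get(void)
 *	{
 *		atomic_inc(&my_refcount);		// take a reference
 *	}
 *
 *	void my_put(void)
 *	{
 *		if (atomic_dec_and_test(&my_refcount))	// drop it; last one frees
 *			my_free();			// hypothetical cleanup
 *	}
 */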
#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)
/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	((v)->counter = (i))
/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%0, %1		# atomic64_add	\n"
			"	daddu	%0, %2				\n"
			"	scd	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%0, %1		# atomic64_sub	\n"
			"	dsubu	%0, %2				\n"
			"	scd	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}
/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%1, %2	# atomic64_add_return	\n"
			"	daddu	%0, %1, %3			\n"
			"	scd	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp + i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	lld	%1, %2	# atomic64_sub_return	\n"
			"	dsubu	%0, %1, %3			\n"
			"	scd	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp - i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
/*
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the addition was performed, zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))
/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)
/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)
/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))
/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))
/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
#else /* !CONFIG_64BIT */

#include <asm-generic/atomic64.h>

#endif /* CONFIG_64BIT */
/*
 * atomic*_return operations are serializing but not the non-*_return
 * ones.
 */
#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
#define smp_mb__after_atomic_dec()	smp_llsc_mb()
#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
#define smp_mb__after_atomic_inc()	smp_llsc_mb()
#include <asm-generic/atomic-long.h>

#endif /* _ASM_ATOMIC_H */