/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

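/*
 * Illustrative usage (a minimal sketch; the names below are
 * hypothetical and not part of this header):
 *
 *	static atomic_t users = ATOMIC_INIT(0);
 *
 *	atomic_inc(&users);			(take a reference)
 *	...
 *	if (atomic_dec_and_test(&users))
 *		release_last_user();		(last reference gone)
 */
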
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))

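/*
 * The arithmetic operations below choose one of three strategies at
 * compile time: an ll/sc loop using the branch-likely beqzl to work
 * around the R10000 errata (R10000_LLSC_WAR), a plain ll/sc retry
 * loop on other LL/SC-capable CPUs, or, on CPUs without ll/sc, a
 * fallback that disables interrupts around a plain read-modify-write.
 */
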
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

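/*
 * atomic_cmpxchg() returns the value that was in @v before the
 * attempt; the exchange succeeded iff that value equals the expected
 * one.  A minimal sketch of the usual retry loop (v and the doubling
 * are illustrative only):
 *
 *	static atomic_t v = ATOMIC_INIT(1);
 *	int old, new;
 *
 *	do {
 *		old = atomic_read(&v);
 *		new = old * 2;			(any transformation)
 *	} while (atomic_cmpxchg(&v, old, new) != old);
 */
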
/*
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

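/*
 * atomic_inc_not_zero() is the classical "take a reference only if
 * the object is still live" helper.  Sketch (obj and refcnt are
 * hypothetical):
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;		(object already being torn down)
 */
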
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

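/*
 * atomic_dec_and_test() is the usual release side of a reference
 * count: exactly one thread sees the count hit zero and frees the
 * object.  Sketch (obj and kfree usage are illustrative):
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		kfree(obj);
 */
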
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * operations.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

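/*
 * Illustrative use of the barrier hooks above (a sketch; the counter
 * name is hypothetical): callers needing memory ordering around a
 * non-returning atomic op bracket it explicitly, e.g.
 *
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 *	smp_mb__after_atomic_dec();
 *
 * atomic_dec_return() and the other *_return operations need no such
 * bracketing, as they are already serializing.
 */
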
#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */