/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)		{ (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))
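
/*
 * A purely illustrative sketch of typical usage -- the names below
 * (nr_widgets, widget_created(), widgets_outstanding()) are
 * hypothetical and not part of this header:
 *
 *	static atomic_t nr_widgets = ATOMIC_INIT(0);
 *
 *	void widget_created(void)
 *	{
 *		atomic_inc(&nr_widgets);
 *	}
 *
 *	int widgets_outstanding(void)
 *	{
 *		return atomic_read(&nr_widgets);
 *	}
 */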

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}
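
/*
 * The LL/SC sequence above is in effect a hardware retry loop: ll
 * loads the counter and marks the line as linked, sc stores the new
 * value only if nothing else wrote the line in between, and the
 * branch restarts the sequence on failure.  A rough C-level sketch,
 * assuming hypothetical load_linked()/store_conditional() helpers
 * that do not exist as kernel functions:
 *
 *	do {
 *		temp = load_linked(&v->counter);
 *		temp += i;
 *	} while (!store_conditional(&v->counter, temp));
 */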

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but also return the new value.
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
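
/*
 * atomic_xchg() is enough for a crude test-and-set lock.  A purely
 * illustrative sketch that ignores memory-ordering and fairness
 * concerns -- this is not how kernel spinlocks are implemented, and
 * my_lock/my_lock_acquire()/my_lock_release() are hypothetical names:
 *
 *	static atomic_t my_lock = ATOMIC_INIT(0);
 *
 *	static void my_lock_acquire(void)
 *	{
 *		while (atomic_xchg(&my_lock, 1) != 0)
 *			;	// spin until the old value reads 0
 *	}
 *
 *	static void my_lock_release(void)
 *	{
 *		atomic_set(&my_lock, 0);
 *	}
 */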

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
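
/*
 * atomic_inc_not_zero() is the classic building block for taking a
 * reference on an object whose refcount may concurrently drop to zero
 * (after which the object gets freed).  An illustrative sketch only --
 * struct my_obj and obj_get() are hypothetical:
 *
 *	struct my_obj *obj_get(struct my_obj *obj)
 *	{
 *		if (!atomic_inc_not_zero(&obj->refcnt))
 *			return NULL;	// already on its way to being freed
 *		return obj;
 *	}
 */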

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
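
/*
 * atomic_dec_if_positive() lends itself to trydown-style counters: the
 * decrement only happens when the result stays non-negative, so a
 * negative return means no resource was taken.  A hedged sketch, with
 * my_trydown() as a hypothetical name:
 *
 *	int my_trydown(atomic_t *count)
 *	{
 *		return atomic_dec_if_positive(count) < 0 ? -EBUSY : 0;
 *	}
 */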

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v,i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but also return the new value.
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	unsigned long result;

	smp_mb();

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	smp_mb();

	return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
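
/*
 * Illustrative use of the barriers above, with hypothetical names: the
 * non-*_return operations do not serialize, so a plain store that must
 * be visible before the atomic update needs an explicit barrier.
 *
 *	obj->state = READY;		// plain store
 *	smp_mb__before_atomic_dec();	// order the store before the dec
 *	atomic_dec(&obj->pending);
 */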

#include <asm-generic/atomic.h>
#endif /* _ASM_ATOMIC_H */