/* include/asm-mips/atomic.h */
1/*
2 * Atomic operations that C can't guarantee us. Useful for
3 * resource counting etc..
4 *
5 * But use these as seldom as possible since they are much more slower
6 * than regular operations.
7 *
8 * This file is subject to the terms and conditions of the GNU General Public
9 * License. See the file "COPYING" in the main directory of this archive
10 * for more details.
11 *
e303e088 12 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
1da177e4 13 */
1da177e4
LT
14#ifndef _ASM_ATOMIC_H
15#define _ASM_ATOMIC_H
16
192ef366 17#include <linux/irqflags.h>
0004a9df 18#include <asm/barrier.h>
1da177e4
LT
19#include <asm/cpu-features.h>
20#include <asm/war.h>
2856f5e3 21#include <asm/system.h>
1da177e4 22
1da177e4
LT
/*
 * atomic_t wraps a plain int; the volatile qualifier forces the
 * compiler to emit a real load/store for every read/set so the value
 * is never cached in a register across accesses.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))
43
44/*
45 * atomic_add - add integer to atomic variable
46 * @i: integer value to add
47 * @v: pointer of type atomic_t
48 *
49 * Atomically adds @i to @v.
50 */
51static __inline__ void atomic_add(int i, atomic_t * v)
52{
53 if (cpu_has_llsc && R10000_LLSC_WAR) {
54 unsigned long temp;
55
56 __asm__ __volatile__(
c4559f67 57 " .set mips3 \n"
1da177e4
LT
58 "1: ll %0, %1 # atomic_add \n"
59 " addu %0, %2 \n"
60 " sc %0, %1 \n"
61 " beqzl %0, 1b \n"
aac8aa77 62 " .set mips0 \n"
1da177e4
LT
63 : "=&r" (temp), "=m" (v->counter)
64 : "Ir" (i), "m" (v->counter));
65 } else if (cpu_has_llsc) {
66 unsigned long temp;
67
68 __asm__ __volatile__(
c4559f67 69 " .set mips3 \n"
1da177e4
LT
70 "1: ll %0, %1 # atomic_add \n"
71 " addu %0, %2 \n"
72 " sc %0, %1 \n"
f65e4fa8
RB
73 " beqz %0, 2f \n"
74 " .subsection 2 \n"
75 "2: b 1b \n"
76 " .previous \n"
aac8aa77 77 " .set mips0 \n"
1da177e4
LT
78 : "=&r" (temp), "=m" (v->counter)
79 : "Ir" (i), "m" (v->counter));
80 } else {
81 unsigned long flags;
82
49edd098 83 raw_local_irq_save(flags);
1da177e4 84 v->counter += i;
49edd098 85 raw_local_irq_restore(flags);
1da177e4
LT
86 }
87}
88
89/*
90 * atomic_sub - subtract the atomic variable
91 * @i: integer value to subtract
92 * @v: pointer of type atomic_t
93 *
94 * Atomically subtracts @i from @v.
95 */
96static __inline__ void atomic_sub(int i, atomic_t * v)
97{
98 if (cpu_has_llsc && R10000_LLSC_WAR) {
99 unsigned long temp;
100
101 __asm__ __volatile__(
c4559f67 102 " .set mips3 \n"
1da177e4
LT
103 "1: ll %0, %1 # atomic_sub \n"
104 " subu %0, %2 \n"
105 " sc %0, %1 \n"
106 " beqzl %0, 1b \n"
aac8aa77 107 " .set mips0 \n"
1da177e4
LT
108 : "=&r" (temp), "=m" (v->counter)
109 : "Ir" (i), "m" (v->counter));
110 } else if (cpu_has_llsc) {
111 unsigned long temp;
112
113 __asm__ __volatile__(
c4559f67 114 " .set mips3 \n"
1da177e4
LT
115 "1: ll %0, %1 # atomic_sub \n"
116 " subu %0, %2 \n"
117 " sc %0, %1 \n"
f65e4fa8
RB
118 " beqz %0, 2f \n"
119 " .subsection 2 \n"
120 "2: b 1b \n"
121 " .previous \n"
aac8aa77 122 " .set mips0 \n"
1da177e4
LT
123 : "=&r" (temp), "=m" (v->counter)
124 : "Ir" (i), "m" (v->counter));
125 } else {
126 unsigned long flags;
127
49edd098 128 raw_local_irq_save(flags);
1da177e4 129 v->counter -= i;
49edd098 130 raw_local_irq_restore(flags);
1da177e4
LT
131 }
132}
133
134/*
135 * Same as above, but return the result value
136 */
137static __inline__ int atomic_add_return(int i, atomic_t * v)
138{
139 unsigned long result;
140
17099b11 141 smp_llsc_mb();
0004a9df 142
1da177e4
LT
143 if (cpu_has_llsc && R10000_LLSC_WAR) {
144 unsigned long temp;
145
146 __asm__ __volatile__(
c4559f67 147 " .set mips3 \n"
1da177e4
LT
148 "1: ll %1, %2 # atomic_add_return \n"
149 " addu %0, %1, %3 \n"
150 " sc %0, %2 \n"
151 " beqzl %0, 1b \n"
152 " addu %0, %1, %3 \n"
aac8aa77 153 " .set mips0 \n"
1da177e4
LT
154 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
155 : "Ir" (i), "m" (v->counter)
156 : "memory");
157 } else if (cpu_has_llsc) {
158 unsigned long temp;
159
160 __asm__ __volatile__(
c4559f67 161 " .set mips3 \n"
1da177e4
LT
162 "1: ll %1, %2 # atomic_add_return \n"
163 " addu %0, %1, %3 \n"
164 " sc %0, %2 \n"
f65e4fa8 165 " beqz %0, 2f \n"
1da177e4 166 " addu %0, %1, %3 \n"
f65e4fa8
RB
167 " .subsection 2 \n"
168 "2: b 1b \n"
169 " .previous \n"
aac8aa77 170 " .set mips0 \n"
1da177e4
LT
171 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
172 : "Ir" (i), "m" (v->counter)
173 : "memory");
174 } else {
175 unsigned long flags;
176
49edd098 177 raw_local_irq_save(flags);
1da177e4
LT
178 result = v->counter;
179 result += i;
180 v->counter = result;
49edd098 181 raw_local_irq_restore(flags);
1da177e4
LT
182 }
183
17099b11 184 smp_llsc_mb();
0004a9df 185
1da177e4
LT
186 return result;
187}
188
189static __inline__ int atomic_sub_return(int i, atomic_t * v)
190{
191 unsigned long result;
192
17099b11 193 smp_llsc_mb();
0004a9df 194
1da177e4
LT
195 if (cpu_has_llsc && R10000_LLSC_WAR) {
196 unsigned long temp;
197
198 __asm__ __volatile__(
c4559f67 199 " .set mips3 \n"
1da177e4
LT
200 "1: ll %1, %2 # atomic_sub_return \n"
201 " subu %0, %1, %3 \n"
202 " sc %0, %2 \n"
203 " beqzl %0, 1b \n"
204 " subu %0, %1, %3 \n"
aac8aa77 205 " .set mips0 \n"
1da177e4
LT
206 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
207 : "Ir" (i), "m" (v->counter)
208 : "memory");
209 } else if (cpu_has_llsc) {
210 unsigned long temp;
211
212 __asm__ __volatile__(
c4559f67 213 " .set mips3 \n"
1da177e4
LT
214 "1: ll %1, %2 # atomic_sub_return \n"
215 " subu %0, %1, %3 \n"
216 " sc %0, %2 \n"
f65e4fa8 217 " beqz %0, 2f \n"
1da177e4 218 " subu %0, %1, %3 \n"
f65e4fa8
RB
219 " .subsection 2 \n"
220 "2: b 1b \n"
221 " .previous \n"
aac8aa77 222 " .set mips0 \n"
1da177e4
LT
223 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
224 : "Ir" (i), "m" (v->counter)
225 : "memory");
226 } else {
227 unsigned long flags;
228
49edd098 229 raw_local_irq_save(flags);
1da177e4
LT
230 result = v->counter;
231 result -= i;
232 v->counter = result;
49edd098 233 raw_local_irq_restore(flags);
1da177e4
LT
234 }
235
17099b11 236 smp_llsc_mb();
0004a9df 237
1da177e4
LT
238 return result;
239}
240
241/*
f10d14dd
AG
242 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
243 * @i: integer value to subtract
1da177e4
LT
244 * @v: pointer of type atomic_t
245 *
f10d14dd
AG
246 * Atomically test @v and subtract @i if @v is greater or equal than @i.
247 * The function returns the old value of @v minus @i.
1da177e4
LT
248 */
249static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
250{
251 unsigned long result;
252
17099b11 253 smp_llsc_mb();
0004a9df 254
1da177e4
LT
255 if (cpu_has_llsc && R10000_LLSC_WAR) {
256 unsigned long temp;
257
258 __asm__ __volatile__(
c4559f67 259 " .set mips3 \n"
1da177e4
LT
260 "1: ll %1, %2 # atomic_sub_if_positive\n"
261 " subu %0, %1, %3 \n"
262 " bltz %0, 1f \n"
263 " sc %0, %2 \n"
92f22c18 264 " .set noreorder \n"
1da177e4 265 " beqzl %0, 1b \n"
92f22c18
RB
266 " subu %0, %1, %3 \n"
267 " .set reorder \n"
1da177e4 268 "1: \n"
aac8aa77 269 " .set mips0 \n"
1da177e4
LT
270 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
271 : "Ir" (i), "m" (v->counter)
272 : "memory");
273 } else if (cpu_has_llsc) {
274 unsigned long temp;
275
276 __asm__ __volatile__(
c4559f67 277 " .set mips3 \n"
1da177e4
LT
278 "1: ll %1, %2 # atomic_sub_if_positive\n"
279 " subu %0, %1, %3 \n"
280 " bltz %0, 1f \n"
281 " sc %0, %2 \n"
92f22c18 282 " .set noreorder \n"
f65e4fa8 283 " beqz %0, 2f \n"
92f22c18
RB
284 " subu %0, %1, %3 \n"
285 " .set reorder \n"
1da177e4 286 "1: \n"
f65e4fa8
RB
287 " .subsection 2 \n"
288 "2: b 1b \n"
289 " .previous \n"
aac8aa77 290 " .set mips0 \n"
1da177e4
LT
291 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
292 : "Ir" (i), "m" (v->counter)
293 : "memory");
294 } else {
295 unsigned long flags;
296
49edd098 297 raw_local_irq_save(flags);
1da177e4
LT
298 result = v->counter;
299 result -= i;
300 if (result >= 0)
301 v->counter = result;
49edd098 302 raw_local_irq_restore(flags);
1da177e4
LT
303 }
304
17099b11 305 smp_llsc_mb();
0004a9df 306
1da177e4
LT
307 return result;
308}
309
e12f644b
MD
310#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
311#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
4a6dae6d 312
8426e1f6
NP
313/**
314 * atomic_add_unless - add unless the number is a given value
315 * @v: pointer of type atomic_t
316 * @a: the amount to add to v...
317 * @u: ...unless v is equal to u.
318 *
319 * Atomically adds @a to @v, so long as it was not @u.
320 * Returns non-zero if @v was not @u, and zero otherwise.
321 */
2856f5e3
MD
322static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
323{
324 int c, old;
325 c = atomic_read(v);
326 for (;;) {
327 if (unlikely(c == (u)))
328 break;
329 old = atomic_cmpxchg((v), c, c + (a));
330 if (likely(old == c))
331 break;
332 c = old;
333 }
334 return c != (u);
335}
8426e1f6
NP
336#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
337
#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
875d43e7 405#ifdef CONFIG_64BIT
1da177e4 406
/*
 * atomic64_t is the 64-bit counterpart of atomic_t (only available
 * on CONFIG_64BIT kernels, where long is 64 bits wide).
 */
typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)	((v)->counter = (i))
425/*
426 * atomic64_add - add integer to atomic variable
427 * @i: integer value to add
428 * @v: pointer of type atomic64_t
429 *
430 * Atomically adds @i to @v.
431 */
432static __inline__ void atomic64_add(long i, atomic64_t * v)
433{
434 if (cpu_has_llsc && R10000_LLSC_WAR) {
435 unsigned long temp;
436
437 __asm__ __volatile__(
aac8aa77 438 " .set mips3 \n"
1da177e4
LT
439 "1: lld %0, %1 # atomic64_add \n"
440 " addu %0, %2 \n"
441 " scd %0, %1 \n"
442 " beqzl %0, 1b \n"
aac8aa77 443 " .set mips0 \n"
1da177e4
LT
444 : "=&r" (temp), "=m" (v->counter)
445 : "Ir" (i), "m" (v->counter));
446 } else if (cpu_has_llsc) {
447 unsigned long temp;
448
449 __asm__ __volatile__(
aac8aa77 450 " .set mips3 \n"
1da177e4
LT
451 "1: lld %0, %1 # atomic64_add \n"
452 " addu %0, %2 \n"
453 " scd %0, %1 \n"
f65e4fa8
RB
454 " beqz %0, 2f \n"
455 " .subsection 2 \n"
456 "2: b 1b \n"
457 " .previous \n"
aac8aa77 458 " .set mips0 \n"
1da177e4
LT
459 : "=&r" (temp), "=m" (v->counter)
460 : "Ir" (i), "m" (v->counter));
461 } else {
462 unsigned long flags;
463
49edd098 464 raw_local_irq_save(flags);
1da177e4 465 v->counter += i;
49edd098 466 raw_local_irq_restore(flags);
1da177e4
LT
467 }
468}
469
470/*
471 * atomic64_sub - subtract the atomic variable
472 * @i: integer value to subtract
473 * @v: pointer of type atomic64_t
474 *
475 * Atomically subtracts @i from @v.
476 */
477static __inline__ void atomic64_sub(long i, atomic64_t * v)
478{
479 if (cpu_has_llsc && R10000_LLSC_WAR) {
480 unsigned long temp;
481
482 __asm__ __volatile__(
aac8aa77 483 " .set mips3 \n"
1da177e4
LT
484 "1: lld %0, %1 # atomic64_sub \n"
485 " subu %0, %2 \n"
486 " scd %0, %1 \n"
487 " beqzl %0, 1b \n"
aac8aa77 488 " .set mips0 \n"
1da177e4
LT
489 : "=&r" (temp), "=m" (v->counter)
490 : "Ir" (i), "m" (v->counter));
491 } else if (cpu_has_llsc) {
492 unsigned long temp;
493
494 __asm__ __volatile__(
aac8aa77 495 " .set mips3 \n"
1da177e4
LT
496 "1: lld %0, %1 # atomic64_sub \n"
497 " subu %0, %2 \n"
498 " scd %0, %1 \n"
f65e4fa8
RB
499 " beqz %0, 2f \n"
500 " .subsection 2 \n"
501 "2: b 1b \n"
502 " .previous \n"
aac8aa77 503 " .set mips0 \n"
1da177e4
LT
504 : "=&r" (temp), "=m" (v->counter)
505 : "Ir" (i), "m" (v->counter));
506 } else {
507 unsigned long flags;
508
49edd098 509 raw_local_irq_save(flags);
1da177e4 510 v->counter -= i;
49edd098 511 raw_local_irq_restore(flags);
1da177e4
LT
512 }
513}
514
515/*
516 * Same as above, but return the result value
517 */
518static __inline__ long atomic64_add_return(long i, atomic64_t * v)
519{
520 unsigned long result;
521
17099b11 522 smp_llsc_mb();
0004a9df 523
1da177e4
LT
524 if (cpu_has_llsc && R10000_LLSC_WAR) {
525 unsigned long temp;
526
527 __asm__ __volatile__(
aac8aa77 528 " .set mips3 \n"
1da177e4
LT
529 "1: lld %1, %2 # atomic64_add_return \n"
530 " addu %0, %1, %3 \n"
531 " scd %0, %2 \n"
532 " beqzl %0, 1b \n"
533 " addu %0, %1, %3 \n"
aac8aa77 534 " .set mips0 \n"
1da177e4
LT
535 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
536 : "Ir" (i), "m" (v->counter)
537 : "memory");
538 } else if (cpu_has_llsc) {
539 unsigned long temp;
540
541 __asm__ __volatile__(
aac8aa77 542 " .set mips3 \n"
1da177e4
LT
543 "1: lld %1, %2 # atomic64_add_return \n"
544 " addu %0, %1, %3 \n"
545 " scd %0, %2 \n"
f65e4fa8 546 " beqz %0, 2f \n"
1da177e4 547 " addu %0, %1, %3 \n"
f65e4fa8
RB
548 " .subsection 2 \n"
549 "2: b 1b \n"
550 " .previous \n"
aac8aa77 551 " .set mips0 \n"
1da177e4
LT
552 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
553 : "Ir" (i), "m" (v->counter)
554 : "memory");
555 } else {
556 unsigned long flags;
557
49edd098 558 raw_local_irq_save(flags);
1da177e4
LT
559 result = v->counter;
560 result += i;
561 v->counter = result;
49edd098 562 raw_local_irq_restore(flags);
1da177e4
LT
563 }
564
17099b11 565 smp_llsc_mb();
0004a9df 566
1da177e4
LT
567 return result;
568}
569
570static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
571{
572 unsigned long result;
573
17099b11 574 smp_llsc_mb();
0004a9df 575
1da177e4
LT
576 if (cpu_has_llsc && R10000_LLSC_WAR) {
577 unsigned long temp;
578
579 __asm__ __volatile__(
aac8aa77 580 " .set mips3 \n"
1da177e4
LT
581 "1: lld %1, %2 # atomic64_sub_return \n"
582 " subu %0, %1, %3 \n"
583 " scd %0, %2 \n"
584 " beqzl %0, 1b \n"
585 " subu %0, %1, %3 \n"
aac8aa77 586 " .set mips0 \n"
1da177e4
LT
587 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
588 : "Ir" (i), "m" (v->counter)
589 : "memory");
590 } else if (cpu_has_llsc) {
591 unsigned long temp;
592
593 __asm__ __volatile__(
aac8aa77 594 " .set mips3 \n"
1da177e4
LT
595 "1: lld %1, %2 # atomic64_sub_return \n"
596 " subu %0, %1, %3 \n"
597 " scd %0, %2 \n"
f65e4fa8 598 " beqz %0, 2f \n"
1da177e4 599 " subu %0, %1, %3 \n"
f65e4fa8
RB
600 " .subsection 2 \n"
601 "2: b 1b \n"
602 " .previous \n"
aac8aa77 603 " .set mips0 \n"
1da177e4
LT
604 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
605 : "Ir" (i), "m" (v->counter)
606 : "memory");
607 } else {
608 unsigned long flags;
609
49edd098 610 raw_local_irq_save(flags);
1da177e4
LT
611 result = v->counter;
612 result -= i;
613 v->counter = result;
49edd098 614 raw_local_irq_restore(flags);
1da177e4
LT
615 }
616
17099b11 617 smp_llsc_mb();
0004a9df 618
1da177e4
LT
619 return result;
620}
621
622/*
f10d14dd
AG
623 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
624 * @i: integer value to subtract
1da177e4
LT
625 * @v: pointer of type atomic64_t
626 *
f10d14dd
AG
627 * Atomically test @v and subtract @i if @v is greater or equal than @i.
628 * The function returns the old value of @v minus @i.
1da177e4
LT
629 */
630static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
631{
632 unsigned long result;
633
17099b11 634 smp_llsc_mb();
0004a9df 635
1da177e4
LT
636 if (cpu_has_llsc && R10000_LLSC_WAR) {
637 unsigned long temp;
638
639 __asm__ __volatile__(
aac8aa77 640 " .set mips3 \n"
1da177e4
LT
641 "1: lld %1, %2 # atomic64_sub_if_positive\n"
642 " dsubu %0, %1, %3 \n"
643 " bltz %0, 1f \n"
644 " scd %0, %2 \n"
92f22c18 645 " .set noreorder \n"
1da177e4 646 " beqzl %0, 1b \n"
92f22c18
RB
647 " dsubu %0, %1, %3 \n"
648 " .set reorder \n"
1da177e4 649 "1: \n"
aac8aa77 650 " .set mips0 \n"
1da177e4
LT
651 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
652 : "Ir" (i), "m" (v->counter)
653 : "memory");
654 } else if (cpu_has_llsc) {
655 unsigned long temp;
656
657 __asm__ __volatile__(
aac8aa77 658 " .set mips3 \n"
1da177e4
LT
659 "1: lld %1, %2 # atomic64_sub_if_positive\n"
660 " dsubu %0, %1, %3 \n"
661 " bltz %0, 1f \n"
662 " scd %0, %2 \n"
92f22c18 663 " .set noreorder \n"
f65e4fa8 664 " beqz %0, 2f \n"
92f22c18
RB
665 " dsubu %0, %1, %3 \n"
666 " .set reorder \n"
1da177e4 667 "1: \n"
f65e4fa8
RB
668 " .subsection 2 \n"
669 "2: b 1b \n"
670 " .previous \n"
aac8aa77 671 " .set mips0 \n"
1da177e4
LT
672 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
673 : "Ir" (i), "m" (v->counter)
674 : "memory");
675 } else {
676 unsigned long flags;
677
49edd098 678 raw_local_irq_save(flags);
1da177e4
LT
679 result = v->counter;
680 result -= i;
681 if (result >= 0)
682 v->counter = result;
49edd098 683 raw_local_irq_restore(flags);
1da177e4
LT
684 }
685
17099b11 686 smp_llsc_mb();
0004a9df 687
1da177e4
LT
688 return result;
689}
690
e12f644b 691#define atomic64_cmpxchg(v, o, n) \
7b239bb1 692 ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
e12f644b
MD
693#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
694
695/**
696 * atomic64_add_unless - add unless the number is a given value
697 * @v: pointer of type atomic64_t
698 * @a: the amount to add to v...
699 * @u: ...unless v is equal to u.
700 *
701 * Atomically adds @a to @v, so long as it was not @u.
702 * Returns non-zero if @v was not @u, and zero otherwise.
703 */
2856f5e3
MD
704static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
705{
706 long c, old;
707 c = atomic64_read(v);
708 for (;;) {
709 if (unlikely(c == (u)))
710 break;
711 old = atomic64_cmpxchg((v), c, c + (a));
712 if (likely(old == c))
713 break;
714 c = old;
715 }
716 return c != (u);
717}
718
e12f644b
MD
719#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
720
#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
1da177e4 787
875d43e7 788#endif /* CONFIG_64BIT */
1da177e4
LT
789
790/*
791 * atomic*_return operations are serializing but not the non-*_return
792 * versions.
793 */
17099b11
RB
794#define smp_mb__before_atomic_dec() smp_llsc_mb()
795#define smp_mb__after_atomic_dec() smp_llsc_mb()
796#define smp_mb__before_atomic_inc() smp_llsc_mb()
797#define smp_mb__after_atomic_inc() smp_llsc_mb()
1da177e4 798
d3cb4871 799#include <asm-generic/atomic.h>
17099b11 800
1da177e4 801#endif /* _ASM_ATOMIC_H */
This page took 0.381601 seconds and 5 git commands to generate.