Merge branch 'core-fixes-for-linus' of git://git.kernel.org/pub/scm/linux/kernel...
[deliverable/linux.git] / arch / sparc64 / lib / atomic.S
CommitLineData
/* atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
 */
5
#include <asm/asi.h>
#include <asm/backoff.h>	/* BACKOFF_SETUP / BACKOFF_SPIN cas-retry backoff macros */

	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
	/* atomic_add: atomically add a 32-bit increment to *atomic_ptr.
	 * In:  %o0 = increment, %o1 = atomic_ptr
	 * No return value and no memory barriers (see header comment above).
	 * Clobbers %g1, %g7, %o2/%o3 (backoff state) and condition codes.
	 */
	.globl	atomic_add
	.type	atomic_add,#function
atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)		/* init backoff counter in %o2 */
1:	lduw	[%o1], %g1		/* %g1 = old = *ptr (32-bit load) */
	add	%g1, %o0, %g7		/* %g7 = old + increment */
	cas	[%o1], %g1, %g7		/* if (*ptr == %g1) *ptr = %g7; %g7 = prior *ptr */
	cmp	%g1, %g7		/* equal -> our cas won */
	bne,pn	%icc, 2f		/* lost the race: back off and retry */
	 nop				/* branch delay slot */
	retl
	 nop				/* call delay slot */
2:	BACKOFF_SPIN(%o2, %o3, 1b)	/* exponential backoff, then branch back to 1b */
	.size	atomic_add, .-atomic_add
30
	/* atomic_sub: atomically subtract a 32-bit decrement from *atomic_ptr.
	 * In:  %o0 = decrement, %o1 = atomic_ptr
	 * No return value and no memory barriers (see header comment above).
	 * Clobbers %g1, %g7, %o2/%o3 (backoff state) and condition codes.
	 */
	.globl	atomic_sub
	.type	atomic_sub,#function
atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)		/* init backoff counter in %o2 */
1:	lduw	[%o1], %g1		/* %g1 = old = *ptr (32-bit load) */
	sub	%g1, %o0, %g7		/* %g7 = old - decrement */
	cas	[%o1], %g1, %g7		/* if (*ptr == %g1) *ptr = %g7; %g7 = prior *ptr */
	cmp	%g1, %g7		/* equal -> our cas won */
	bne,pn	%icc, 2f		/* lost the race: back off and retry */
	 nop				/* branch delay slot */
	retl
	 nop				/* call delay slot */
2:	BACKOFF_SPIN(%o2, %o3, 1b)	/* exponential backoff, then branch back to 1b */
	.size	atomic_sub, .-atomic_sub
45
	/* On SMP we need to use memory barriers to ensure
	 * correct memory operation ordering, nop these out
	 * for uniprocessor.
	 */
#ifdef CONFIG_SMP

/* Emitted before the load/cas loop of the value-returning routines. */
#define ATOMIC_PRE_BARRIER	membar #StoreLoad | #LoadLoad;

/* Post-update barrier.  Each use site is followed by "retl; nop": on SMP
 * this macro branches back to the shared return stub at "80:" below,
 * executing the membar in the branch delay slot, which makes the retl/nop
 * that follow the expansion dead code.  On UP the macro is empty and that
 * trailing retl/nop pair is the actual return.
 */
#define ATOMIC_POST_BARRIER \
	ba,pt %xcc, 80b; \
	membar #StoreLoad | #StoreStore

/* Shared return stub, reached via the "80b" backward reference above. */
80:	retl
	 nop
#else
/* UP: both barriers compile away entirely. */
#define ATOMIC_PRE_BARRIER
#define ATOMIC_POST_BARRIER
#endif
63
1da177e4
LT
	/* atomic_add_ret: atomically add and return the new 32-bit value.
	 * In:  %o0 = increment, %o1 = atomic_ptr
	 * Out: %o0 = new value, sign-extended to 64 bits.
	 * Performs memory barriers around the update on SMP (macros above).
	 * Clobbers %g1, %g7, %o2/%o3 (backoff state) and condition codes.
	 */
	.globl	atomic_add_ret
	.type	atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)		/* init backoff counter in %o2 */
	ATOMIC_PRE_BARRIER		/* SMP: order prior accesses before the update */
1:	lduw	[%o1], %g1		/* %g1 = old = *ptr (32-bit load) */
	add	%g1, %o0, %g7		/* %g7 = old + increment */
	cas	[%o1], %g1, %g7		/* if (*ptr == %g1) *ptr = %g7; %g7 = prior *ptr */
	cmp	%g1, %g7		/* equal -> our cas won */
	bne,pn	%icc, 2f		/* lost the race: back off and retry */
	 add	%g7, %o0, %g7		/* delay slot: %g7 = new value; recomputed on retry, so harmless if taken */
	sra	%g7, 0, %o0		/* sign-extend the 32-bit result into 64-bit %o0 */
	ATOMIC_POST_BARRIER		/* SMP: membar + return via shared stub 80b */
	retl				/* reached only when POST_BARRIER is empty (UP) */
	 nop				/* call delay slot */
2:	BACKOFF_SPIN(%o2, %o3, 1b)	/* exponential backoff, then branch back to 1b */
	.size	atomic_add_ret, .-atomic_add_ret
81
	/* atomic_sub_ret: atomically subtract and return the new 32-bit value.
	 * In:  %o0 = decrement, %o1 = atomic_ptr
	 * Out: %o0 = new value, sign-extended to 64 bits.
	 * Performs memory barriers around the update on SMP (macros above).
	 * Clobbers %g1, %g7, %o2/%o3 (backoff state) and condition codes.
	 */
	.globl	atomic_sub_ret
	.type	atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)		/* init backoff counter in %o2 */
	ATOMIC_PRE_BARRIER		/* SMP: order prior accesses before the update */
1:	lduw	[%o1], %g1		/* %g1 = old = *ptr (32-bit load) */
	sub	%g1, %o0, %g7		/* %g7 = old - decrement */
	cas	[%o1], %g1, %g7		/* if (*ptr == %g1) *ptr = %g7; %g7 = prior *ptr */
	cmp	%g1, %g7		/* equal -> our cas won */
	bne,pn	%icc, 2f		/* lost the race: back off and retry */
	 sub	%g7, %o0, %g7		/* delay slot: %g7 = new value; recomputed on retry, so harmless if taken */
	sra	%g7, 0, %o0		/* sign-extend the 32-bit result into 64-bit %o0 */
	ATOMIC_POST_BARRIER		/* SMP: membar + return via shared stub 80b */
	retl				/* reached only when POST_BARRIER is empty (UP) */
	 nop				/* call delay slot */
2:	BACKOFF_SPIN(%o2, %o3, 1b)	/* exponential backoff, then branch back to 1b */
	.size	atomic_sub_ret, .-atomic_sub_ret
99
	/* atomic64_add: atomically add a 64-bit increment to *atomic_ptr.
	 * In:  %o0 = increment, %o1 = atomic_ptr
	 * No return value and no memory barriers (see header comment above).
	 * Clobbers %g1, %g7, %o2/%o3 (backoff state) and condition codes.
	 */
	.globl	atomic64_add
	.type	atomic64_add,#function
atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)		/* init backoff counter in %o2 */
1:	ldx	[%o1], %g1		/* %g1 = old = *ptr (64-bit load) */
	add	%g1, %o0, %g7		/* %g7 = old + increment */
	casx	[%o1], %g1, %g7		/* 64-bit cas; %g7 = prior *ptr */
	cmp	%g1, %g7		/* equal -> our cas won */
	bne,pn	%xcc, 2f		/* lost the race: back off and retry */
	 nop				/* branch delay slot */
	retl
	 nop				/* call delay slot */
2:	BACKOFF_SPIN(%o2, %o3, 1b)	/* exponential backoff, then branch back to 1b */
	.size	atomic64_add, .-atomic64_add
114
	/* atomic64_sub: atomically subtract a 64-bit decrement from *atomic_ptr.
	 * In:  %o0 = decrement, %o1 = atomic_ptr
	 * No return value and no memory barriers (see header comment above).
	 * Clobbers %g1, %g7, %o2/%o3 (backoff state) and condition codes.
	 */
	.globl	atomic64_sub
	.type	atomic64_sub,#function
atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)		/* init backoff counter in %o2 */
1:	ldx	[%o1], %g1		/* %g1 = old = *ptr (64-bit load) */
	sub	%g1, %o0, %g7		/* %g7 = old - decrement */
	casx	[%o1], %g1, %g7		/* 64-bit cas; %g7 = prior *ptr */
	cmp	%g1, %g7		/* equal -> our cas won */
	bne,pn	%xcc, 2f		/* lost the race: back off and retry */
	 nop				/* branch delay slot */
	retl
	 nop				/* call delay slot */
2:	BACKOFF_SPIN(%o2, %o3, 1b)	/* exponential backoff, then branch back to 1b */
	.size	atomic64_sub, .-atomic64_sub
129
	/* atomic64_add_ret: atomically add and return the new 64-bit value.
	 * In:  %o0 = increment, %o1 = atomic_ptr
	 * Out: %o0 = new value (full 64 bits, no extension needed).
	 * Performs memory barriers around the update on SMP (macros above).
	 * Clobbers %g1, %g7, %o2/%o3 (backoff state) and condition codes.
	 */
	.globl	atomic64_add_ret
	.type	atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)		/* init backoff counter in %o2 */
	ATOMIC_PRE_BARRIER		/* SMP: order prior accesses before the update */
1:	ldx	[%o1], %g1		/* %g1 = old = *ptr (64-bit load) */
	add	%g1, %o0, %g7		/* %g7 = old + increment */
	casx	[%o1], %g1, %g7		/* 64-bit cas; %g7 = prior *ptr */
	cmp	%g1, %g7		/* equal -> our cas won */
	bne,pn	%xcc, 2f		/* lost the race: back off and retry */
	 add	%g7, %o0, %g7		/* delay slot: %g7 = new value; recomputed on retry, so harmless if taken */
	mov	%g7, %o0		/* return the new 64-bit value */
	ATOMIC_POST_BARRIER		/* SMP: membar + return via shared stub 80b */
	retl				/* reached only when POST_BARRIER is empty (UP) */
	 nop				/* call delay slot */
2:	BACKOFF_SPIN(%o2, %o3, 1b)	/* exponential backoff, then branch back to 1b */
	.size	atomic64_add_ret, .-atomic64_add_ret
147
	/* atomic64_sub_ret: atomically subtract and return the new 64-bit value.
	 * In:  %o0 = decrement, %o1 = atomic_ptr
	 * Out: %o0 = new value (full 64 bits, no extension needed).
	 * Performs memory barriers around the update on SMP (macros above).
	 * Clobbers %g1, %g7, %o2/%o3 (backoff state) and condition codes.
	 */
	.globl	atomic64_sub_ret
	.type	atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	BACKOFF_SETUP(%o2)		/* init backoff counter in %o2 */
	ATOMIC_PRE_BARRIER		/* SMP: order prior accesses before the update */
1:	ldx	[%o1], %g1		/* %g1 = old = *ptr (64-bit load) */
	sub	%g1, %o0, %g7		/* %g7 = old - decrement */
	casx	[%o1], %g1, %g7		/* 64-bit cas; %g7 = prior *ptr */
	cmp	%g1, %g7		/* equal -> our cas won */
	bne,pn	%xcc, 2f		/* lost the race: back off and retry */
	 sub	%g7, %o0, %g7		/* delay slot: %g7 = new value; recomputed on retry, so harmless if taken */
	mov	%g7, %o0		/* return the new 64-bit value */
	ATOMIC_POST_BARRIER		/* SMP: membar + return via shared stub 80b */
	retl				/* reached only when POST_BARRIER is empty (UP) */
	 nop				/* call delay slot */
2:	BACKOFF_SPIN(%o2, %o3, 1b)	/* exponential backoff, then branch back to 1b */
	.size	atomic64_sub_ret, .-atomic64_sub_ret
This page took 0.316707 seconds and 5 git commands to generate.