/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
 * atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999 David S. Miller (davem@redhat.com)
 */

#include <linux/config.h>
#include <asm/asi.h>

	.text

	/* Two versions of the atomic routines, one that
	 * does not return a value and does not perform
	 * memory barriers, and a second which returns
	 * a value and does the barriers.
	 */
	.globl	atomic_add
	.type	atomic_add,#function
atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	 nop
	retl
	 nop
	.size	atomic_add, .-atomic_add
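
	/* The routine above is the standard compare-and-swap retry loop:
	 * reload, recompute, and retry whenever cas observes that another
	 * CPU changed the counter underneath us. Roughly, in C (a sketch
	 * only, written in terms of a generic cmpxchg() helper, not how
	 * the kernel actually builds this function):
	 *
	 *	void atomic_add(int incr, atomic_t *v)
	 *	{
	 *		int old;
	 *		do {
	 *			old = v->counter;                // lduw
	 *		} while (cmpxchg(&v->counter, old,
	 *				 old + incr) != old);    // cas
	 *	}
	 */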

	.globl	atomic_sub
	.type	atomic_sub,#function
atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	 nop
	retl
	 nop
	.size	atomic_sub, .-atomic_sub

	/* On SMP we need to use memory barriers to ensure
	 * correct memory operation ordering, nop these out
	 * for uniprocessor.
	 */
#ifdef CONFIG_SMP

#define ATOMIC_PRE_BARRIER	membar #StoreLoad | #LoadLoad;
#define ATOMIC_POST_BARRIER	\
	ba,pt %xcc, 80b;	\
	membar #StoreLoad | #StoreStore

80:	retl
	 nop
#else
#define ATOMIC_PRE_BARRIER
#define ATOMIC_POST_BARRIER
#endif
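
	/* On SMP, ATOMIC_POST_BARRIER expands to a branch-always to the
	 * shared "80:" return stub above, with the membar sitting in the
	 * delay slot of the ba,pt (which is always executed). The point,
	 * presumably, is to keep the membar out of the delay slot of the
	 * retl itself. The retl/nop that follows each use of the macro in
	 * the _ret routines below is only reached in the uniprocessor
	 * build, where the macro expands to nothing.
	 */
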
	.globl	atomic_add_ret
	.type	atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	 add	%g7, %o0, %g7
	sra	%g7, 0, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
	.size	atomic_add_ret, .-atomic_add_ret
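
	/* Note on the _ret variants: cas leaves the old memory value in
	 * %g7, so the new value is recomputed in the branch delay slot
	 * before being returned. "sra %g7, 0, %o0" is a shift-right-by-
	 * zero that sign-extends bits 31:0 into the full 64-bit register,
	 * as the calling convention expects of a 32-bit int return value;
	 * the 64-bit variants below can use a plain mov instead.
	 */
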
	.globl	atomic_sub_ret
	.type	atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	 sub	%g7, %o0, %g7
	sra	%g7, 0, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
	.size	atomic_sub_ret, .-atomic_sub_ret

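	/* The 64-bit counterparts below have identical structure, but
	 * operate on a full long: ldx/casx instead of lduw/cas, with the
	 * retry branch testing %xcc (the 64-bit condition codes) rather
	 * than %icc.
	 */
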
	.globl	atomic64_add
	.type	atomic64_add,#function
atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	 nop
	retl
	 nop
	.size	atomic64_add, .-atomic64_add

	.globl	atomic64_sub
	.type	atomic64_sub,#function
atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	 nop
	retl
	 nop
	.size	atomic64_sub, .-atomic64_sub

	.globl	atomic64_add_ret
	.type	atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	 add	%g7, %o0, %g7
	mov	%g7, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
	.size	atomic64_add_ret, .-atomic64_add_ret

	.globl	atomic64_sub_ret
	.type	atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	 sub	%g7, %o0, %g7
	mov	%g7, %o0
	ATOMIC_POST_BARRIER
	retl
	 nop
	.size	atomic64_sub_ret, .-atomic64_sub_ret
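
	/* For reference, the C-level declarations these routines back (a
	 * sketch from memory of the asm-sparc64/atomic.h declarations of
	 * this era; that header is the authoritative source):
	 *
	 *	extern void atomic_add(int, atomic_t *);
	 *	extern void atomic_sub(int, atomic_t *);
	 *	extern int  atomic_add_ret(int, atomic_t *);
	 *	extern int  atomic_sub_ret(int, atomic_t *);
	 *	extern void atomic64_add(int, atomic64_t *);
	 *	extern void atomic64_sub(int, atomic64_t *);
	 *	extern long atomic64_add_ret(int, atomic64_t *);
	 *	extern long atomic64_sub_ret(int, atomic64_t *);
	 */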