#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

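/*
 * Illustrative usage sketch (not part of this header): a counter
 * shared between CPUs.  The names nr_events, record_event and
 * events_seen below are made up for the example.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	void record_event(void)
 *	{
 *		atomic_inc(&nr_events);
 *	}
 *
 *	int events_seen(void)
 *	{
 *		return atomic_read(&nr_events);
 *	}
 */
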
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

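/*
 * Unlike atomic_add(), the value-returning form above is bracketed by
 * EIEIO_ON_SMP/ISYNC_ON_SMP, so on SMP it also behaves as a memory
 * barrier.  A sketch of code that relies on this (bytes_queued and
 * queue_bytes are hypothetical names):
 *
 *	static atomic_t bytes_queued = ATOMIC_INIT(0);
 *
 *	int queue_bytes(int n)
 *	{
 *		// the barrier semantics order this update against the
 *		// caller's surrounding memory accesses
 *		return atomic_add_return(n, &bytes_queued);
 *	}
 */
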
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))

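/*
 * atomic_cmpxchg() returns the value that was in *v; the exchange
 * took place iff that equals the expected old value.  Sketch of the
 * classic compare-and-swap retry loop built on it (add_unless_zero
 * is a hypothetical helper, not a kernel API):
 *
 *	static int add_unless_zero(atomic_t *v, int a)
 *	{
 *		int c, old;
 *
 *		c = atomic_read(v);
 *		while (c != 0 && (old = atomic_cmpxchg(v, c, c + a)) != c)
 *			c = old;	// lost the race, retry with new value
 *		return c != 0;
 *	}
 */
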
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

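/*
 * Typical reference-count pattern built on atomic_dec_and_test()
 * (illustrative only; my_obj, my_obj_put and my_obj_free are made-up
 * names):
 *
 *	struct my_obj {
 *		atomic_t refcnt;
 *		// ... payload ...
 *	};
 *
 *	void my_obj_put(struct my_obj *p)
 *	{
 *		// exactly one caller observes the 1 -> 0 transition,
 *		// so exactly one caller frees the object
 *		if (atomic_dec_and_test(&p->refcnt))
 *			my_obj_free(p);
 *	}
 */
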
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

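/*
 * atomic_dec_if_positive() suits semaphore-like "take one if any
 * remain" logic.  Sketch (free_slots and try_take_slot are
 * hypothetical):
 *
 *	static atomic_t free_slots = ATOMIC_INIT(8);
 *
 *	int try_take_slot(void)
 *	{
 *		// >= 0: the decrement happened and we own a slot;
 *		// -1: the count was already 0 and nothing was stored
 *		return atomic_dec_if_positive(&free_slots) >= 0;
 *	}
 */
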
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

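/*
 * atomic_inc()/atomic_dec() above carry no barriers of their own, so
 * on powerpc these helpers must expand to a full smp_mb().  Sketch of
 * the pairing a caller writes (shared_data and flag are placeholders):
 *
 *	shared_data = 1;
 *	smp_mb__before_atomic_inc();	// order the store above...
 *	atomic_inc(&flag);		// ...before the counter update
 */
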
#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

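/*
 * The 64-bit variants below mirror the 32-bit ones, with the
 * doubleword reservation pair ldarx/stdcx. in place of lwarx/stwcx.
 * and int widened to long.  Minimal usage sketch (total_bytes and
 * account are made-up names):
 *
 *	static atomic64_t total_bytes = ATOMIC64_INIT(0);
 *
 *	void account(long n)
 *	{
 *		atomic64_add(n, &total_bytes);
 *	}
 */
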
static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	EIEIO_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */