Commit | Line | Data |
---|---|---|
ae3a197e DH |
1 | #ifndef _ASM_POWERPC_CMPXCHG_H_ |
2 | #define _ASM_POWERPC_CMPXCHG_H_ | |
3 | ||
4 | #ifdef __KERNEL__ | |
5 | #include <linux/compiler.h> | |
6 | #include <asm/synch.h> | |
7 | #include <asm/asm-compat.h> | |
10d8b148 | 8 | #include <linux/bug.h> |
ae3a197e DH |
9 | |
10 | /* | |
11 | * Atomic exchange | |
12 | * | |
26760fc1 | 13 | * Changes the memory location '*p' to be val and returns |
ae3a197e DH |
14 | * the previous value stored there. |
15 | */ | |
26760fc1 | 16 | |
/*
 * Atomically exchange the 32-bit value at *p with val and return the
 * previous value.  No barriers are emitted ("local" variant), so this
 * gives no ordering against other CPUs -- only the swap itself is atomic.
 */
static __always_inline unsigned long
__xchg_u32_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2 \n"		/* load old value, take reservation */
	PPC405_ERR77(0,%2)		/* PPC405 erratum #77 workaround macro */
"	stwcx.	%3,0,%2 \n\
	bne-	1b"			/* reservation lost -> retry */
	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}
33 | ||
/*
 * Relaxed 32-bit exchange: no barriers and no "memory" clobber, so
 * neither the compiler nor the CPU is constrained from reordering
 * around it; only the exchange itself is atomic.  Returns the previous
 * value of *p.
 */
static __always_inline unsigned long
__xchg_u32_relaxed(u32 *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2\n"		/* load old value, take reservation */
	PPC405_ERR77(0, %2)
"	stwcx.	%3,0,%2\n"		/* conditionally store val */
"	bne-	1b"			/* reservation lost -> retry */
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (val)
	: "cc");

	return prev;
}
50 | ||
51 | #ifdef CONFIG_PPC64 | |
/*
 * 64-bit counterpart of __xchg_u32_local(): barrier-free atomic
 * exchange using ldarx/stdcx.  Returns the previous value of *p.
 */
static __always_inline unsigned long
__xchg_u64_local(volatile void *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2 \n"		/* load old value, take reservation */
	PPC405_ERR77(0,%2)		/* erratum macro; presumably a no-op on PPC64 -- confirm */
"	stdcx.	%3,0,%2 \n\
	bne-	1b"			/* reservation lost -> retry */
	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
	: "r" (p), "r" (val)
	: "cc", "memory");

	return prev;
}
68 | ||
/*
 * Relaxed 64-bit exchange: like __xchg_u32_relaxed() but using
 * ldarx/stdcx.  No barriers, no "memory" clobber -- no ordering is
 * implied.  Returns the previous value of *p.
 */
static __always_inline unsigned long
__xchg_u64_relaxed(u64 *p, unsigned long val)
{
	unsigned long prev;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2\n"		/* load old value, take reservation */
	PPC405_ERR77(0, %2)
"	stdcx.	%3,0,%2\n"		/* conditionally store val */
"	bne-	1b"			/* reservation lost -> retry */
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (val)
	: "cc");

	return prev;
}
85 | #endif | |
86 | ||
ae3a197e | 87 | static __always_inline unsigned long |
26760fc1 | 88 | __xchg_local(volatile void *ptr, unsigned long x, unsigned int size) |
ae3a197e DH |
89 | { |
90 | switch (size) { | |
91 | case 4: | |
26760fc1 | 92 | return __xchg_u32_local(ptr, x); |
ae3a197e DH |
93 | #ifdef CONFIG_PPC64 |
94 | case 8: | |
26760fc1 | 95 | return __xchg_u64_local(ptr, x); |
ae3a197e DH |
96 | #endif |
97 | } | |
10d8b148 | 98 | BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg"); |
ae3a197e DH |
99 | return x; |
100 | } | |
101 | ||
102 | static __always_inline unsigned long | |
26760fc1 | 103 | __xchg_relaxed(void *ptr, unsigned long x, unsigned int size) |
ae3a197e DH |
104 | { |
105 | switch (size) { | |
106 | case 4: | |
26760fc1 | 107 | return __xchg_u32_relaxed(ptr, x); |
ae3a197e DH |
108 | #ifdef CONFIG_PPC64 |
109 | case 8: | |
26760fc1 | 110 | return __xchg_u64_relaxed(ptr, x); |
ae3a197e DH |
111 | #endif |
112 | } | |
10d8b148 | 113 | BUILD_BUG_ON_MSG(1, "Unsupported size for __xchg_local"); |
ae3a197e DH |
114 | return x; |
115 | } | |
ae3a197e DH |
/*
 * xchg_local(ptr, x): type-preserving front end for __xchg_local().
 * Evaluates x exactly once; the result is cast back to the pointed-to
 * type of ptr.
 */
#define xchg_local(ptr,x)						     \
  ({									     \
     __typeof__(*(ptr)) _x_ = (x);					     \
     (__typeof__(*(ptr))) __xchg_local((ptr),				     \
     		(unsigned long)_x_, sizeof(*(ptr))); 			     \
  })
122 | ||
26760fc1 BF |
/*
 * xchg_relaxed(ptr, x): type-preserving front end for __xchg_relaxed().
 * Evaluates x exactly once; no ordering is implied (see the _relaxed
 * implementations above).
 */
#define xchg_relaxed(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_relaxed((ptr),			\
			(unsigned long)_x_, sizeof(*(ptr)));		\
})
ae3a197e DH |
129 | /* |
130 | * Compare and exchange - if *p == old, set it to new, | |
131 | * and return the old value of *p. | |
132 | */ | |
ae3a197e DH |
133 | |
/*
 * Fully-ordered 32-bit compare-and-exchange: if *p == old, store new.
 * Returns the previous value of *p.  Entry/exit barriers provide full
 * ordering on success; note a failed compare branches to 2: and thus
 * skips the exit barrier.
 */
static __always_inline unsigned long
__cmpxchg_u32(volatile unsigned int *p, unsigned long old, unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
	cmpw	0,%0,%3\n\
	bne-	2f\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%4,0,%2\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
156 | ||
/*
 * Barrier-free ("local") 32-bit compare-and-exchange: same ll/sc loop
 * as __cmpxchg_u32() but with no entry/exit barriers, so no ordering
 * against other CPUs.  Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u32_local(volatile unsigned int *p, unsigned long old,
			unsigned long new)
{
	unsigned int prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32\n\
	cmpw	0,%0,%3\n\
	bne-	2f\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%4,0,%2\n\
	bne-	1b"
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
178 | ||
56c08e6d BF |
/*
 * Relaxed 32-bit compare-and-exchange: no barriers and no "memory"
 * clobber, so no ordering is implied at all.  Returns the previous
 * value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u32_relaxed(u32 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32_relaxed\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
	PPC405_ERR77(0, %2)
"	stwcx.	%4,0,%2\n"
"	bne-	1b\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc");

	return prev;
}
198 | ||
/*
 * The cmpxchg family provides no ordering guarantee when the compare
 * fails, so we can avoid superfluous barriers by implementing cmpxchg()
 * and cmpxchg_acquire() in assembly.  We don't do the same for
 * cmpxchg_release(), because that would put a barrier in the middle of
 * the ll/sc loop, which is probably a bad idea: for example, it might
 * make the conditional store more likely to fail.
 */
/*
 * 32-bit compare-and-exchange with acquire semantics.  The acquire
 * barrier sits before label 2:, so it runs only on the success path --
 * a failed compare branches past it (see the ordering comment above).
 * Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u32_acquire(u32 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	lwarx	%0,0,%2		# __cmpxchg_u32_acquire\n"
"	cmpw	0,%0,%3\n"
"	bne-	2f\n"
	PPC405_ERR77(0, %2)
"	stwcx.	%4,0,%2\n"
"	bne-	1b\n"
	PPC_ACQUIRE_BARRIER
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
228 | ||
ae3a197e DH |
229 | #ifdef CONFIG_PPC64 |
/*
 * Fully-ordered 64-bit compare-and-exchange (PPC64 only): if *p == old,
 * store new.  Entry/exit barriers give full ordering on success; a
 * failed compare branches to 2: and skips the exit barrier.  Returns
 * the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u64(volatile unsigned long *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
	cmpd	0,%0,%3\n\
	bne-	2f\n\
	stdcx.	%4,0,%2\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
251 | ||
/*
 * Barrier-free ("local") 64-bit compare-and-exchange: same ll/sc loop
 * as __cmpxchg_u64() but with no barriers, so no ordering against
 * other CPUs.  Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u64_local(volatile unsigned long *p, unsigned long old,
			unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64\n\
	cmpd	0,%0,%3\n\
	bne-	2f\n\
	stdcx.	%4,0,%2\n\
	bne-	1b"
	"\n\
2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
56c08e6d BF |
272 | |
/*
 * Relaxed 64-bit compare-and-exchange: no barriers and no "memory"
 * clobber, so no ordering is implied.  Returns the previous value
 * of *p.
 */
static __always_inline unsigned long
__cmpxchg_u64_relaxed(u64 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64_relaxed\n"
"	cmpd	0,%0,%3\n"
"	bne-	2f\n"
"	stdcx.	%4,0,%2\n"
"	bne-	1b\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc");

	return prev;
}
291 | ||
/*
 * 64-bit compare-and-exchange with acquire semantics.  As in the u32
 * variant, the acquire barrier sits before label 2: and therefore runs
 * only on the success path.  Returns the previous value of *p.
 */
static __always_inline unsigned long
__cmpxchg_u64_acquire(u64 *p, unsigned long old, unsigned long new)
{
	unsigned long prev;

	__asm__ __volatile__ (
"1:	ldarx	%0,0,%2		# __cmpxchg_u64_acquire\n"
"	cmpd	0,%0,%3\n"
"	bne-	2f\n"
"	stdcx.	%4,0,%2\n"
"	bne-	1b\n"
	PPC_ACQUIRE_BARRIER
	"\n"
"2:"
	: "=&r" (prev), "+m" (*p)
	: "r" (p), "r" (old), "r" (new)
	: "cc", "memory");

	return prev;
}
ae3a197e DH |
312 | #endif |
313 | ||
ae3a197e DH |
/*
 * Size dispatch for cmpxchg(): 4-byte always, 8-byte on PPC64 only.
 * Any other size is a compile-time error via BUILD_BUG_ON_MSG.
 */
static __always_inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
	  unsigned int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg");
	return old;
}
329 | ||
/*
 * Size dispatch for cmpxchg_local(): 4-byte always, 8-byte on PPC64
 * only.  Any other size is a compile-time error.
 */
static __always_inline unsigned long
__cmpxchg_local(volatile void *ptr, unsigned long old, unsigned long new,
	  unsigned int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32_local(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_local(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_local");
	return old;
}
345 | ||
56c08e6d BF |
/*
 * Size dispatch for cmpxchg_relaxed(): 4-byte always, 8-byte on PPC64
 * only.  Any other size is a compile-time error.
 */
static __always_inline unsigned long
__cmpxchg_relaxed(void *ptr, unsigned long old, unsigned long new,
		  unsigned int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32_relaxed(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_relaxed(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_relaxed");
	return old;
}
361 | ||
/*
 * Size dispatch for cmpxchg_acquire(): 4-byte always, 8-byte on PPC64
 * only.  Any other size is a compile-time error.
 */
static __always_inline unsigned long
__cmpxchg_acquire(void *ptr, unsigned long old, unsigned long new,
		  unsigned int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32_acquire(ptr, old, new);
#ifdef CONFIG_PPC64
	case 8:
		return __cmpxchg_u64_acquire(ptr, old, new);
#endif
	}
	BUILD_BUG_ON_MSG(1, "Unsupported size for __cmpxchg_acquire");
	return old;
}
ae3a197e DH |
/*
 * cmpxchg(ptr, o, n): fully-ordered compare-and-exchange.  Type-
 * preserving wrapper around __cmpxchg(); evaluates o and n exactly once.
 */
#define cmpxchg(ptr, o, n)						 \
  ({									 \
     __typeof__(*(ptr)) _o_ = (o);					 \
     __typeof__(*(ptr)) _n_ = (n);					 \
     (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,		 \
				    (unsigned long)_n_, sizeof(*(ptr))); \
  })
384 | ||
385 | ||
/*
 * cmpxchg_local(ptr, o, n): barrier-free compare-and-exchange.  Type-
 * preserving wrapper around __cmpxchg_local(); evaluates o and n once.
 */
#define cmpxchg_local(ptr, o, n)					 \
  ({									 \
     __typeof__(*(ptr)) _o_ = (o);					 \
     __typeof__(*(ptr)) _n_ = (n);					 \
     (__typeof__(*(ptr))) __cmpxchg_local((ptr), (unsigned long)_o_,	 \
				    (unsigned long)_n_, sizeof(*(ptr))); \
  })
393 | ||
56c08e6d BF |
/*
 * cmpxchg_relaxed(ptr, o, n): compare-and-exchange with no implied
 * ordering.  Type-preserving wrapper around __cmpxchg_relaxed().
 */
#define cmpxchg_relaxed(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_relaxed((ptr),			\
			(unsigned long)_o_, (unsigned long)_n_,		\
			sizeof(*(ptr)));				\
})
402 | ||
/*
 * cmpxchg_acquire(ptr, o, n): compare-and-exchange with acquire
 * ordering on success.  Type-preserving wrapper around
 * __cmpxchg_acquire().
 */
#define cmpxchg_acquire(ptr, o, n)					\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg_acquire((ptr),			\
			(unsigned long)_o_, (unsigned long)_n_,		\
			sizeof(*(ptr)));				\
})
ae3a197e DH |
/*
 * 64-bit cmpxchg variants: each statically asserts the operand is
 * exactly 8 bytes wide.  On PPC64 they forward to the generic macros
 * above; on 32-bit only cmpxchg64_local() exists, via the asm-generic
 * software fallback.
 */
#ifdef CONFIG_PPC64
#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })
#define cmpxchg64_relaxed(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_relaxed((ptr), (o), (n));				\
})
#define cmpxchg64_acquire(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_acquire((ptr), (o), (n));				\
})
#else
#include <asm-generic/cmpxchg-local.h>
/* 32-bit: no native 8-byte ll/sc; use the generic (irq-disable) version. */
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif
436 | ||
437 | #endif /* __KERNEL__ */ | |
438 | #endif /* _ASM_POWERPC_CMPXCHG_H_ */ |