Commit | Line | Data |
---|---|---|
fef74705 RB |
1 | /* |
2 | * This file is subject to the terms and conditions of the GNU General Public | |
3 | * License. See the file "COPYING" in the main directory of this archive | |
4 | * for more details. | |
5 | * | |
6 | * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org) | |
7 | */ | |
8 | #ifndef __ASM_CMPXCHG_H | |
9 | #define __ASM_CMPXCHG_H | |
10 | ||
5520e426 | 11 | #include <linux/bug.h> |
fef74705 | 12 | #include <linux/irqflags.h> |
b81947c6 DH |
13 | #include <asm/war.h> |
14 | ||
/*
 * __xchg_u32 - atomically exchange the 32-bit word at *m with @val.
 * Returns the value previously stored at *m.
 *
 * Three implementations, selected by CPU capabilities:
 *  - LL/SC with a branch-likely (beqzl) retry, required as the R10000
 *    silicon erratum workaround (R10000_LLSC_WAR);
 *  - a plain LL/SC retry loop for other LL/SC-capable CPUs;
 *  - an interrupt-disable fallback for CPUs without LL/SC.
 * The smp_mb__before_llsc()/smp_llsc_mb() pair makes the whole
 * operation a full memory barrier on SMP.
 */
static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
{
	__u32 retval;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long dummy;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	ll	%0, %3			# xchg_u32	\n"
		"	.set	mips0					\n"
		"	move	%2, %z4					\n"
		"	.set	arch=r4000				\n"
		"	sc	%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (retval), "=m" (*m), "=&r" (dummy)
		: "R" (*m), "Jr" (val)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long dummy;

		do {
			__asm__ __volatile__(
			"	.set	arch=r4000			\n"
			"	ll	%0, %3		# xchg_u32	\n"
			"	.set	mips0				\n"
			"	move	%2, %z4				\n"
			"	.set	arch=r4000			\n"
			"	sc	%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (retval), "=m" (*m), "=&r" (dummy)
			: "R" (*m), "Jr" (val)
			: "memory");
		} while (unlikely(!dummy));	/* sc wrote 0: store failed, retry */
	} else {
		unsigned long flags;

		/* No LL/SC available: emulate with interrupts disabled. */
		raw_local_irq_save(flags);
		retval = *m;
		*m = val;
		raw_local_irq_restore(flags);	/* implies memory barrier */
	}

	smp_llsc_mb();

	return retval;
}
65 | ||
#ifdef CONFIG_64BIT
/*
 * __xchg_u64 - atomically exchange the 64-bit word at *m with @val.
 * Returns the previous contents of *m.  Structure mirrors __xchg_u32()
 * but uses lld/scd; only available on 64-bit kernels (see #else stub).
 * Acts as a full memory barrier on SMP via the surrounding
 * smp_mb__before_llsc()/smp_llsc_mb() pair.
 */
static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
{
	__u64 retval;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long dummy;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	lld	%0, %3			# xchg_u64	\n"
		"	move	%2, %z4					\n"
		"	scd	%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (retval), "=m" (*m), "=&r" (dummy)
		: "R" (*m), "Jr" (val)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long dummy;

		do {
			__asm__ __volatile__(
			"	.set	arch=r4000			\n"
			"	lld	%0, %3		# xchg_u64	\n"
			"	move	%2, %z4				\n"
			"	scd	%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (retval), "=m" (*m), "=&r" (dummy)
			: "R" (*m), "Jr" (val)
			: "memory");
		} while (unlikely(!dummy));	/* scd wrote 0: store failed, retry */
	} else {
		unsigned long flags;

		/* No LL/SC available: emulate with interrupts disabled. */
		raw_local_irq_save(flags);
		retval = *m;
		*m = val;
		raw_local_irq_restore(flags);	/* implies memory barrier */
	}

	smp_llsc_mb();

	return retval;
}
#else
/* 32-bit kernels: a 64-bit xchg fails at link time via this nonexistent symbol. */
extern __u64 __xchg_u64_unsupported_on_32bit_kernels(volatile __u64 * m, __u64 val);
#define __xchg_u64 __xchg_u64_unsupported_on_32bit_kernels
#endif
117 | ||
/*
 * __xchg - size-dispatching helper behind the xchg() macro.
 * Forwards to the 32- or 64-bit exchange implementation; for any other
 * size the input value is simply handed back unchanged (the xchg()
 * macro's BUILD_BUG_ON screens sizes at compile time before we get here).
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	if (size == 4)
		return __xchg_u32(ptr, x);
	if (size == 8)
		return __xchg_u64(ptr, x);

	return x;
}
129 | ||
/*
 * xchg - atomically exchange *ptr with x, evaluating to the old value.
 * The BUILD_BUG_ON masks sizeof(*(ptr)) against ~0xc, so any object
 * whose size has bits outside 0xc (i.e. anything other than 4 or 8
 * bytes, the sizes __xchg() handles) is rejected at compile time.
 */
#define xchg(ptr, x)							\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) & ~0xc);				\
									\
	((__typeof__(*(ptr)))						\
		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));	\
})
fef74705 RB |
137 | |
#define __HAVE_ARCH_CMPXCHG 1

/*
 * __cmpxchg_asm - raw compare-and-exchange primitive.
 * @ld, @st: load-linked/store-conditional mnemonic pair chosen by the
 *           caller for the operand width ("ll"/"sc" or "lld"/"scd").
 * Loads *m; if it equals @old, attempts to store @new.  Evaluates to
 * the value originally read (equal to @old iff the store was made).
 * Contains NO memory barriers of its own — callers add them, see the
 * pre_barrier/post_barrier arguments of __cmpxchg() below.
 * ".set noat" is required because the asm uses $1 (the AT register)
 * as a scratch register for the new value.
 */
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		/* branch-likely (beqzl) retry: R10000 erratum workaround */ \
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	arch=r4000			\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	arch=r4000			\n"	\
		"	" st "	$1, %1				\n"	\
		"	beqzl	$1, 1b				\n"	\
		"2:						\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=R" (*m)				\
		: "R" (*m), "Jr" (old), "Jr" (new)			\
		: "memory");						\
	} else if (kernel_uses_llsc) {					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	arch=r4000			\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	arch=r4000			\n"	\
		"	" st "	$1, %1				\n"	\
		"	beqz	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=R" (*m)				\
		: "R" (*m), "Jr" (old), "Jr" (new)			\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		/* No LL/SC: emulate with interrupts disabled. */	\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
190 | ||
/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);

/*
 * __cmpxchg - size-dispatching core shared by cmpxchg()/cmpxchg_local().
 * @pre_barrier, @post_barrier: statements executed before/after the
 * atomic operation (full barriers for cmpxchg(), empty for the _local
 * variant).  4-byte objects use ll/sc; 8-byte objects use lld/scd and
 * are only valid on 64-bit kernels.  Any other size — including size 8
 * on a 32-bit kernel, via the DELIBERATE fall-through from case 8 into
 * default — references the nonexistent
 * __cmpxchg_called_with_bad_pointer() and fails at link time.
 */
#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier)		\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __res = 0;					\
									\
	pre_barrier;							\
									\
	switch (sizeof(*(__ptr))) {					\
	case 4:								\
		__res = __cmpxchg_asm("ll", "sc", __ptr, __old, __new);	\
		break;							\
	case 8:								\
		if (sizeof(long) == 8) {				\
			__res = __cmpxchg_asm("lld", "scd", __ptr,	\
					   __old, __new);		\
			break;						\
		}							\
		/* fall through: 8-byte cmpxchg on a 32-bit kernel */	\
	default:							\
		__cmpxchg_called_with_bad_pointer();			\
		break;							\
	}								\
									\
	post_barrier;							\
									\
	__res;								\
})
225 | ||
/*
 * cmpxchg():       fully ordered on SMP (barriers before and after the op).
 * cmpxchg_local(): same operation, no barriers — for CPU-local data only.
 */
#define cmpxchg(ptr, old, new)		__cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
#define cmpxchg_local(ptr, old, new)	__cmpxchg(ptr, old, new, , )
fef74705 | 228 | |
/*
 * 64-bit cmpxchg variants.  Both insist (at compile time) that the
 * target object is exactly 8 bytes wide.
 */
#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })

#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })
#else
/* 32-bit kernels: use the generic irq-disabling 64-bit local version. */
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#endif
245 | ||
fef74705 | 246 | #endif /* __ASM_CMPXCHG_H */ |