Commit | Line | Data |
---|---|---|
fef74705 RB |
1 | /* |
2 | * This file is subject to the terms and conditions of the GNU General Public | |
3 | * License. See the file "COPYING" in the main directory of this archive | |
4 | * for more details. | |
5 | * | |
6 | * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org) | |
7 | */ | |
8 | #ifndef __ASM_CMPXCHG_H | |
9 | #define __ASM_CMPXCHG_H | |
10 | ||
5520e426 | 11 | #include <linux/bug.h> |
fef74705 | 12 | #include <linux/irqflags.h> |
b0984c43 | 13 | #include <asm/compiler.h> |
b81947c6 DH |
14 | #include <asm/war.h> |
15 | ||
/*
 * Atomically exchange the 32-bit word at *m with val and return the value
 * previously stored there.  The access is bracketed by full barriers
 * (smp_mb__before_llsc() before, smp_llsc_mb() after).
 */
static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
{
	__u32 retval;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long dummy;

		/*
		 * R10000 LL/SC erratum workaround: retry with the
		 * branch-likely instruction (beqzl) inside the asm.
		 */
		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	ll	%0, %3			# xchg_u32	\n"
		"	.set	mips0					\n"
		"	move	%2, %z4					\n"
		"	.set	arch=r4000				\n"
		"	sc	%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long dummy;

		/* Retry the LL/SC sequence from C until the SC succeeds
		   (dummy holds the SC success flag). */
		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	ll	%0, %3		# xchg_u32	\n"
			"	.set	mips0				\n"
			"	move	%2, %z4				\n"
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	sc	%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
			  "=&r" (dummy)
			: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
			: "memory");
		} while (unlikely(!dummy));
	} else {
		/* No LL/SC available: make the swap atomic by disabling
		   interrupts around it. */
		unsigned long flags;

		raw_local_irq_save(flags);
		retval = *m;
		*m = val;
		raw_local_irq_restore(flags);	/* implies memory barrier  */
	}

	smp_llsc_mb();

	return retval;
}
67 | ||
#ifdef CONFIG_64BIT
/*
 * 64-bit counterpart of __xchg_u32(): atomically exchange the doubleword
 * at *m with val using lld/scd and return the previous value.  Barrier
 * semantics are identical to __xchg_u32().
 */
static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
{
	__u64 retval;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long dummy;

		/* R10000 erratum workaround: branch-likely retry loop. */
		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	lld	%0, %3			# xchg_u64	\n"
		"	move	%2, %z4					\n"
		"	scd	%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long dummy;

		/* Retry from C until the scd succeeds. */
		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	lld	%0, %3		# xchg_u64	\n"
			"	move	%2, %z4				\n"
			"	scd	%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
			  "=&r" (dummy)
			: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
			: "memory");
		} while (unlikely(!dummy));
	} else {
		/* No LL/SC: disable interrupts around the swap. */
		unsigned long flags;

		raw_local_irq_save(flags);
		retval = *m;
		*m = val;
		raw_local_irq_restore(flags);	/* implies memory barrier  */
	}

	smp_llsc_mb();

	return retval;
}
#else
/* 32-bit kernels have no 64-bit xchg; using it is a link-time error. */
extern __u64 __xchg_u64_unsupported_on_32bit_kernels(volatile __u64 * m, __u64 val);
#define __xchg_u64 __xchg_u64_unsupported_on_32bit_kernels
#endif
120 | ||
/*
 * Dispatch an exchange to the width-specific implementation.  size is the
 * sizeof of the pointed-to object; only 4 and 8 are handled, any other
 * size leaves memory untouched and hands x straight back (the xchg()
 * wrapper rejects bad sizes at compile time).
 */
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	if (size == 4)
		return __xchg_u32(ptr, x);
	if (size == 8)
		return __xchg_u64(ptr, x);

	return x;
}
132 | ||
/*
 * Atomically exchange *ptr with x, returning the previous value, for
 * 32-bit and 64-bit objects (64-bit only links on 64-bit kernels, via
 * __xchg_u64_unsupported_on_32bit_kernels).
 *
 * The previous size check, sizeof(*(ptr)) & ~0xc, also accepted a
 * 12-byte object (12 == 0x4 | 0x8); __xchg() would then silently return
 * x without performing any exchange.  Require exactly 4 or 8 bytes so
 * unsupported sizes fail at compile time instead.
 */
#define xchg(ptr, x)							\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 4 && sizeof(*(ptr)) != 8);	\
									\
	((__typeof__(*(ptr)))						\
		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));	\
})
fef74705 | 140 | |
fef74705 RB |
/*
 * Raw compare-and-exchange: atomically load *m with the "ld" mnemonic;
 * if the value equals old, store new with "st".  Expands to a statement
 * expression yielding the value originally read from *m (callers compare
 * it against old to detect success).  No memory barriers are implied
 * here; __cmpxchg() adds them around this macro.  $1 ($at) is used as a
 * scratch register, hence .set noat.
 */
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc && R10000_LLSC_WAR) {			\
		/* R10000 erratum: branch-likely (beqzl) retry loop */	\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	arch=r4000			\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	arch=r4000			\n"	\
		"	" st "	$1, %1				\n"	\
		"	beqzl	$1, 1b				\n"	\
		"2:						\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: "memory");						\
	} else if (kernel_uses_llsc) {					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	mips0				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"	beqz	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: "memory");						\
	} else {							\
		/* No LL/SC: compare-and-store under disabled irqs */	\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
191 | ||
192 | /* | |
193 | * This function doesn't exist, so you'll get a linker error | |
194 | * if something tries to do an invalid cmpxchg(). | |
195 | */ | |
196 | extern void __cmpxchg_called_with_bad_pointer(void); | |
197 | ||
/*
 * Size-dispatching compare-and-exchange.  pre_barrier and post_barrier
 * are statements pasted before/after the access: cmpxchg() passes SMP
 * barriers, cmpxchg_local() passes nothing.  Unsupported operand sizes
 * resolve to __cmpxchg_called_with_bad_pointer(), which is never
 * defined, turning the mistake into a link error.
 */
#define __cmpxchg(ptr, old, new, pre_barrier, post_barrier)		\
({									\
	__typeof__(ptr) __ptr = (ptr);					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	__typeof__(*(ptr)) __res = 0;					\
									\
	pre_barrier;							\
									\
	switch (sizeof(*(__ptr))) {					\
	case 4:								\
		__res = __cmpxchg_asm("ll", "sc", __ptr, __old, __new); \
		break;							\
	case 8:								\
		if (sizeof(long) == 8) {				\
			__res = __cmpxchg_asm("lld", "scd", __ptr,	\
					   __old, __new);		\
			break;						\
		}							\
		/* fall through: no 64-bit cmpxchg on 32-bit kernels */	\
	default:							\
		__cmpxchg_called_with_bad_pointer();			\
		break;							\
	}								\
									\
	post_barrier;							\
									\
	__res;								\
})
226 | ||
f252ffd5 DD |
/* cmpxchg() is fully ordered on SMP; cmpxchg_local() implies no barriers. */
#define cmpxchg(ptr, old, new) __cmpxchg(ptr, old, new, smp_mb__before_llsc(), smp_llsc_mb())
#define cmpxchg_local(ptr, old, new) __cmpxchg(ptr, old, new, , )
fef74705 | 229 | |
e2093c7b DCZ |
#ifdef CONFIG_64BIT
/*
 * On 64-bit kernels a 64-bit cmpxchg is just the generic one, plus a
 * compile-time check that the operand really is 8 bytes wide.
 */
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#else
#include <asm-generic/cmpxchg-local.h>
/* 32-bit kernels: fall back to the generic irq-disabling local variant;
   cmpxchg64() aliases it here, so it is not SMP-atomic on 32-bit. */
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
#endif
247 | ||
fef74705 | 248 | #endif /* __ASM_CMPXCHG_H */ |