/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 1999, 2000  Ralf Baechle (ralf@gnu.org)
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#include <linux/config.h>
#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/bug.h>
#include <asm/byteorder.h>		/* sigh ... */
#include <asm/cpu-features.h>

#if (_MIPS_SZLONG == 32)
#define SZLONG_LOG 5
#define SZLONG_MASK 31UL
#define __LL	"ll	"
#define __SC	"sc	"
#define cpu_to_lelongp(x) cpu_to_le32p((__u32 *) (x))
#elif (_MIPS_SZLONG == 64)
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
#define __LL	"lld	"
#define __SC	"scd	"
#define cpu_to_lelongp(x) cpu_to_le64p((__u64 *) (x))
#endif
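
/*
 * A bit index nr is split in two below: nr >> SZLONG_LOG selects the
 * word within @addr, and nr & SZLONG_MASK the bit within that word.
 * For example, with 64-bit longs, nr = 70 means word 1 (70 >> 6),
 * bit 6 (70 & 63) -- which is why @nr need not fit in a single word.
 */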

#ifdef __KERNEL__

#include <asm/interrupt.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()	smp_mb()
#define smp_mb__after_clear_bit()	smp_mb()

/*
 * Only disable interrupts for kernel mode stuff to keep usermode stuff
 * that dares to use kernel include files alive.
 */

#define __bi_flags			unsigned long flags
#define __bi_local_irq_save(x)		local_irq_save(x)
#define __bi_local_irq_restore(x)	local_irq_restore(x)
#else
#define __bi_flags
#define __bi_local_irq_save(x)
#define __bi_local_irq_restore(x)
#endif /* __KERNEL__ */

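/*
 * Overview of the atomic operations below: each comes in three
 * flavours.  On CPUs needing R10000_LLSC_WAR, a load-linked/
 * store-conditional (ll/sc) loop retried with the branch-likely
 * beqzl, working around an ll/sc erratum on early R10000 parts;
 * on other ll/sc-capable CPUs, the same loop with a plain beqz;
 * otherwise a fallback that masks interrupts (in kernel mode only,
 * per the __bi_* macros above) around a plain read-modify-write.
 * In the ll/sc loops, sc stores the updated word only if nothing
 * else wrote the location since the ll, and leaves 0 in its
 * register on failure, which restarts the loop.
 */
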
/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	unsigned long temp;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# set_bit	\n"
		"	or	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# set_bit	\n"
		"	or	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	beqz	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		__bi_flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << (nr & SZLONG_MASK);
		__bi_local_irq_save(flags);
		*a |= mask;
		__bi_local_irq_restore(flags);
	}
}

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	unsigned long temp;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# clear_bit	\n"
		"	and	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# clear_bit	\n"
		"	and	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	beqz	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		__bi_flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << (nr & SZLONG_MASK);
		__bi_local_irq_save(flags);
		*a &= ~mask;
		__bi_local_irq_restore(flags);
	}
}
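
/*
 * Usage sketch (illustrative; MY_LOCK_BIT and my_word are made-up
 * names, not part of this header).  Because clear_bit() is unordered,
 * releasing a bit used as a lock needs an explicit barrier first:
 *
 *	smp_mb__before_clear_bit();
 *	clear_bit(MY_LOCK_BIT, &my_word);
 */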

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
	} else if (cpu_has_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	beqz	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		__bi_flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << (nr & SZLONG_MASK);
		__bi_local_irq_save(flags);
		*a ^= mask;
		__bi_local_irq_restore(flags);
	}
}

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp, res;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3			\n"
		"	" __SC	"%2, %1				\n"
		"	beqzl	%2, 1b				\n"
		"	and	%2, %0, %3			\n"
#ifdef CONFIG_SMP
		"	sync					\n"
#endif
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
		: "memory");

		return res != 0;
	} else if (cpu_has_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp, res;

		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	noreorder			\n"
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3			\n"
		"	" __SC	"%2, %1				\n"
		"	beqz	%2, 1b				\n"
		"	and	%2, %0, %3			\n"
#ifdef CONFIG_SMP
		"	sync					\n"
#endif
		"	.set	pop				\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
		: "memory");

		return res != 0;
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		int retval;
		__bi_flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << (nr & SZLONG_MASK);
		__bi_local_irq_save(flags);
		retval = (mask & *a) != 0;
		*a |= mask;
		__bi_local_irq_restore(flags);

		return retval;
	}
}
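
/*
 * Usage sketch (illustrative; lock_word is a made-up variable): the
 * returned old value makes test_and_set_bit() a natural trylock
 * primitive, and the implied barrier orders the critical section:
 *
 *	while (test_and_set_bit(0, &lock_word))
 *		;	spin until we are the one to set bit 0
 */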

/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp, res;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3			\n"
		"	xor	%2, %3				\n"
		"	" __SC	"%2, %1				\n"
		"	beqzl	%2, 1b				\n"
		"	and	%2, %0, %3			\n"
#ifdef CONFIG_SMP
		"	sync					\n"
#endif
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
		: "memory");

		return res != 0;
	} else if (cpu_has_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp, res;

		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	noreorder			\n"
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3			\n"
		"	xor	%2, %3				\n"
		"	" __SC	"%2, %1				\n"
		"	beqz	%2, 1b				\n"
		"	and	%2, %0, %3			\n"
#ifdef CONFIG_SMP
		"	sync					\n"
#endif
		"	.set	pop				\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
		: "memory");

		return res != 0;
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask;
		int retval;
		__bi_flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << (nr & SZLONG_MASK);
		__bi_local_irq_save(flags);
		retval = (mask & *a) != 0;
		*a &= ~mask;
		__bi_local_irq_restore(flags);

		return retval;
	}
}

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp, res;

		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3			\n"
		"	" __SC	"%2, %1				\n"
		"	beqzl	%2, 1b				\n"
		"	and	%2, %0, %3			\n"
#ifdef CONFIG_SMP
		"	sync					\n"
#endif
		"	.set	mips0				\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
		: "memory");

		return res != 0;
	} else if (cpu_has_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp, res;

		__asm__ __volatile__(
		"	.set	push				\n"
		"	.set	noreorder			\n"
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3			\n"
		"	" __SC	"%2, %1				\n"
		"	beqz	%2, 1b				\n"
		"	and	%2, %0, %3			\n"
#ifdef CONFIG_SMP
		"	sync					\n"
#endif
		"	.set	pop				\n"
		: "=&r" (temp), "=m" (*m), "=&r" (res)
		: "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
		: "memory");

		return res != 0;
	} else {
		volatile unsigned long *a = addr;
		unsigned long mask, retval;
		__bi_flags;

		a += nr >> SZLONG_LOG;
		mask = 1UL << (nr & SZLONG_MASK);
		__bi_local_irq_save(flags);
		retval = (mask & *a) != 0;
		*a ^= mask;
		__bi_local_irq_restore(flags);

		return retval;
	}
}

#undef __bi_flags
#undef __bi_local_irq_save
#undef __bi_local_irq_restore

#include <asm-generic/bitops/non-atomic.h>
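
/*
 * The generic header above supplies the non-atomic variants
 * (__set_bit(), __clear_bit(), __change_bit(), the __test_and_*()
 * forms and test_bit()); they skip the ll/sc machinery, so callers
 * must themselves exclude concurrent access to the word.
 */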

/*
 * Return the bit position (0..63) of the most significant 1 bit in a word.
 * Returns -1 if no 1 bit exists.
 */
static inline int __ilog2(unsigned long x)
{
	int lz;

	if (sizeof(x) == 4) {
		__asm__ (
		"	.set	push				\n"
		"	.set	mips32				\n"
		"	clz	%0, %1				\n"
		"	.set	pop				\n"
		: "=r" (lz)
		: "r" (x));

		return 31 - lz;
	}

	BUG_ON(sizeof(x) != 8);

	__asm__ (
	"	.set	push				\n"
	"	.set	mips64				\n"
	"	dclz	%0, %1				\n"
	"	.set	pop				\n"
	: "=r" (lz)
	: "r" (x));

	return 63 - lz;
}
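
/*
 * Worked examples: __ilog2(1) = 0 and __ilog2(0x80000000) = 31; for
 * x = 0, clz/dclz return the full word width (32 or 64), so the
 * function yields -1 as documented.
 */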

#if defined(CONFIG_CPU_MIPS32) || defined(CONFIG_CPU_MIPS64)

/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1.
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __ilog2(word & -word);
}
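
/*
 * word & -word isolates the lowest set bit via two's complement:
 * e.g. word = 0x28 (binary 101000) gives word & -word = 0x8, hence
 * __ffs(0x28) = 3.
 */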

/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int word)
{
	__asm__ ("clz %0, %1" : "=r" (word) : "r" (word));

	return 32 - word;
}
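
/*
 * No zero check is needed here: the MIPS32 clz instruction returns
 * 32 for a zero input, so fls(0) = 32 - 32 = 0 falls out naturally.
 */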

#if defined(CONFIG_64BIT) && defined(CONFIG_CPU_MIPS64)
static inline int fls64(__u64 word)
{
	__asm__ ("dclz %0, %1" : "=r" (word) : "r" (word));

	return 64 - word;
}
#else
#include <asm-generic/bitops/fls64.h>
#endif
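
/*
 * Either variant gives fls64(0) = 0, fls64(1) = 1 and
 * fls64(1ULL << 63) = 64, mirroring the fls() semantics above.
 */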

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, therefore differs in spirit from ffz (man ffs).
 */
static inline int ffs(int word)
{
	if (!word)
		return 0;

	return fls(word & -word);
}
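
/*
 * ffs() is 1-based where __ffs() is 0-based: ffs(0) = 0, ffs(1) = 1
 * and ffs(0x8000) = 16, whereas __ffs(0x8000) = 15.
 */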

#else

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* defined(CONFIG_CPU_MIPS32) || defined(CONFIG_CPU_MIPS64) */

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/ext2-non-atomic.h>
#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/minix.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */