arch/mips/include/asm/bitops.h
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/byteorder.h>		/* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

#if _MIPS_SZLONG == 32
#define SZLONG_LOG 5
#define SZLONG_MASK 31UL
#define __LL		"ll	"
#define __SC		"sc	"
#define __INS		"ins	"
#define __EXT		"ext	"
#elif _MIPS_SZLONG == 64
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
#define __LL		"lld	"
#define __SC		"scd	"
#define __INS		"dins	"
#define __EXT		"dext	"
#endif
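
/*
 * Illustrative note (a sketch, not from this file): a bit index @nr
 * decomposes into a word index and a bit offset, e.g. on a 64-bit
 * kernel nr = 100 selects word 1, bit 36:
 *
 *	word offset = nr >> SZLONG_LOG;		100 >> 6 == 1
 *	bit  offset = nr & SZLONG_MASK;		100 & 63 == 36
 *
 * This is the addressing scheme every function below uses.
 */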

/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit(unsigned long nr,
			    volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
				 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
			      volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
			       volatile unsigned long *addr);


/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	" __LL "%0, %1		# set_bit	\n"
		"	or	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1		# set_bit	\n"
			"	" __INS "%0, %3, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (bit), "r" (~0));
		} while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
	} else if (kernel_uses_llsc) {
		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# set_bit	\n"
			"	or	%0, %2				\n"
			"	" __SC	"%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (1UL << bit));
		} while (unlikely(!temp));
	} else
		__mips_set_bit(nr, addr);
}
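
/*
 * Minimal usage sketch (hypothetical names, assuming <linux/bitmap.h>):
 *
 *	static DECLARE_BITMAP(pending, 128);
 *
 *	set_bit(42, pending);	atomically sets bit 42 of the bitmap
 */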

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	" __LL "%0, %1		# clear_bit	\n"
		"	and	%0, %2				\n"
		"	" __SC "%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (~(1UL << bit)));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1		# clear_bit	\n"
			"	" __INS "%0, $0, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (bit));
		} while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
	} else if (kernel_uses_llsc) {
		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# clear_bit	\n"
			"	and	%0, %2				\n"
			"	" __SC "%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (~(1UL << bit)));
		} while (unlikely(!temp));
	} else
		__mips_clear_bit(nr, addr);
}
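
/*
 * Usage sketch (hypothetical names): when clear_bit() stands in for an
 * unlock, pair it with the barrier the comment above requires:
 *
 *	smp_mb__before_atomic();
 *	clear_bit(LOCK_BIT, &word);
 *
 * clear_bit_unlock() below packages exactly this pairing.
 */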

/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation.  It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_atomic();
	clear_bit(nr, addr);
}

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (1UL << bit));
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# change_bit	\n"
			"	xor	%0, %2				\n"
			"	" __SC	"%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (1UL << bit));
		} while (unlikely(!temp));
	} else
		__mips_change_bit(nr, addr);
}
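
/*
 * Usage sketch (hypothetical names): toggling a bit shared with other
 * CPUs without an explicit read-modify-write race:
 *
 *	change_bit(BLINK_BIT, &led_state);
 */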

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1	# test_and_set_bit	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_set_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}
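
/*
 * Usage sketch (hypothetical names): claiming a one-shot flag so that
 * only a single caller performs an initialisation step:
 *
 *	if (!test_and_set_bit(INIT_DONE, &flags))
 *		do_one_time_init();
 */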

/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1	# test_and_set_bit	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_set_bit_lock(nr, addr);

	smp_llsc_mb();

	return res != 0;
}
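
/*
 * Usage sketch (hypothetical names): the acquire/release pair these
 * helpers are meant for, i.e. a minimal bit spinlock:
 *
 *	while (test_and_set_bit_lock(LOCK_BIT, &word))
 *		cpu_relax();
 *	... critical section ...
 *	clear_bit_unlock(LOCK_BIT, &word);
 */
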
/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	" __LL	"%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3				\n"
		"	xor	%2, %3					\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
			"	" __EXT "%2, %0, %3, 1			\n"
			"	" __INS	"%0, $0, %3, 1			\n"
			"	" __SC	"%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "ir" (bit)
			: "memory");
		} while (unlikely(!temp));
#endif
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
			"	or	%2, %0, %3			\n"
			"	xor	%2, %3				\n"
			"	" __SC	"%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_clear_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}
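
/*
 * Usage sketch (hypothetical names): consuming a pending-work flag that
 * another CPU may be setting concurrently:
 *
 *	if (test_and_clear_bit(WORK_PENDING, &pending))
 *		process_work();
 */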

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	" __LL	"%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL	"%0, %1 # test_and_change_bit	\n"
			"	xor	%2, %0, %3			\n"
			"	" __SC	"\t%2, %1			\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_change_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}

#include <asm-generic/bitops/non-atomic.h>

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation.  It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb();
	__clear_bit(nr, addr);
}
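
/*
 * Note (a sketch, not from this file): __clear_bit_unlock() trades the
 * LL/SC loop for a plain store, so
 *
 *	__clear_bit_unlock(LOCK_BIT, &word);
 *
 * is only safe when no other CPU can modify other bits of @word while
 * the lock bit is held.
 */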

/*
 * Return the bit position (0..63) of the most significant 1 bit in a word
 * Returns -1 if no 1 bit exists
 */
static inline unsigned long __fls(unsigned long word)
{
	int num;

	if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 31 - num;
	}

	if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	dclz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 63 - num;
	}

	num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
	if (!(word & (~0ul << 32))) {
		num -= 32;
		word <<= 32;
	}
#endif
	if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
		num -= 16;
		word <<= 16;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
		num -= 8;
		word <<= 8;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
		num -= 4;
		word <<= 4;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
		num -= 2;
		word <<= 2;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-1))))
		num -= 1;
	return num;
}
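
/*
 * Worked values (illustrative): __fls(1) == 0, __fls(0x80000000ul) == 31,
 * and on 64-bit __fls(~0ul) == 63; the CLZ path computes the same result
 * as 31 - clz(word) (63 - dclz(word) on 64-bit).
 */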

/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __fls(word & -word);
}
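
/*
 * Worked example (illustrative): word & -word isolates the lowest set
 * bit in two's complement, e.g. 0b101000 & -0b101000 == 0b001000, and
 * __fls() of that single-bit value is its position, here 3.
 */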

/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int x)
{
	int r;

	if (!__builtin_constant_p(x) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (x)
		: "r" (x));

		return 32 - x;
	}

	r = 32;
	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}
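
/*
 * Worked values (illustrative): fls() is one-based, so for non-zero x
 * fls(x) == __fls(x) + 1, e.g. fls(0x10) == 5 while __fls(0x10) == 4,
 * and fls(0) is defined as 0.
 */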

#include <asm-generic/bitops/fls64.h>

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, therefore differs in spirit from ffz (man ffs).
 */
static inline int ffs(int word)
{
	if (!word)
		return 0;

	return fls(word & -word);
}
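
/*
 * Worked values (illustrative): ffs(0) == 0, ffs(1) == 1 and
 * ffs(0x8000) == 16; the (word & -word) trick mirrors __ffs() but with
 * one-based numbering and a defined result for zero.
 */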

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */