/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07 Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000 Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/byteorder.h>		/* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/llsc.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit(unsigned long nr,
		volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
		volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
		volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
		volatile unsigned long *addr);


/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered. See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	" __LL "%0, %1		# set_bit	\n"
		"	or	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1		# set_bit	\n"
			"	" __INS "%0, %3, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (bit), "r" (~0));
		} while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
	} else if (kernel_uses_llsc) {
		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# set_bit	\n"
			"	or	%0, %2				\n"
			"	" __SC	"%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (1UL << bit));
		} while (unlikely(!temp));
	} else
		__mips_set_bit(nr, addr);
}

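/*
 * Illustrative usage sketch (editor's addition, not part of the original
 * header). Callers normally operate on a caller-owned bitmap, e.g. one
 * declared with DECLARE_BITMAP() from <linux/types.h>; the bitmap name and
 * bit numbers below are hypothetical:
 *
 *	DECLARE_BITMAP(pending, 128);	// caller initialises it, e.g. bitmap_zero()
 *
 *	set_bit(5, pending);		// atomically set bit 5
 *	clear_bit(5, pending);		// atomically clear it again (defined below)
 *	change_bit(70, pending);	// toggle a bit in the second word
 *
 * As the comment above notes, @nr may exceed BITS_PER_LONG; the word that
 * holds the bit is selected with (nr >> SZLONG_LOG), as in set_bit() itself.
 */
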
/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered. However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	" __LL "%0, %1		# clear_bit	\n"
		"	and	%0, %2				\n"
		"	" __SC "%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (~(1UL << bit)));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1		# clear_bit	\n"
			"	" __INS "%0, $0, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (bit));
		} while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
	} else if (kernel_uses_llsc) {
		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# clear_bit	\n"
			"	and	%0, %2				\n"
			"	" __SC "%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (~(1UL << bit)));
		} while (unlikely(!temp));
	} else
		__mips_clear_bit(nr, addr);
}

/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation. It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_atomic();
	clear_bit(nr, addr);
}

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (1UL << bit));
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# change_bit	\n"
			"	xor	%0, %2				\n"
			"	" __SC	"%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (1UL << bit));
		} while (unlikely(!temp));
	} else
		__mips_change_bit(nr, addr);
}

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	" __LL "%0, %1	# test_and_set_bit	\n"
		"	or	%2, %0, %3			\n"
		"	" __SC	"%2, %1				\n"
		"	beqzl	%2, 1b				\n"
		"	and	%2, %0, %3			\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1	# test_and_set_bit	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_set_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}

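/*
 * Illustrative usage sketch (editor's addition, not part of the original
 * header). The returned old value makes test_and_set_bit() a one-shot
 * "claim" primitive; the flag name and structure below are hypothetical:
 *
 *	if (!test_and_set_bit(MY_BUSY_BIT, &dev->flags)) {
 *		// bit was previously clear: this caller claimed it
 *	} else {
 *		// another CPU had already set the bit
 *	}
 *
 * The smp_mb__before_llsc()/smp_llsc_mb() calls above give the operation
 * full barrier semantics on SMP, as the comment documents.
 */
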
/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	" __LL "%0, %1	# test_and_set_bit	\n"
		"	or	%2, %0, %3			\n"
		"	" __SC	"%2, %1				\n"
		"	beqzl	%2, 1b				\n"
		"	and	%2, %0, %3			\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+m" (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1	# test_and_set_bit	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_set_bit_lock(nr, addr);

	smp_llsc_mb();

	return res != 0;
}
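
/*
 * Illustrative usage sketch (editor's addition, not part of the original
 * header). Paired with clear_bit_unlock() above, test_and_set_bit_lock()
 * can implement a simple bit lock; the lock word and bit are hypothetical:
 *
 *	while (test_and_set_bit_lock(MY_LOCK_BIT, &lock_word))
 *		cpu_relax();			// spin while the bit is set
 *	// ... critical section, ordered after the acquire ...
 *	clear_bit_unlock(MY_LOCK_BIT, &lock_word);	// release
 *
 * Real code would normally use the generic <linux/bit_spinlock.h> helpers
 * rather than open-coding this pattern.
 */
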
/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	" __LL "%0, %1	# test_and_clear_bit	\n"
		"	or	%2, %0, %3			\n"
		"	xor	%2, %3				\n"
		"	" __SC	"%2, %1				\n"
		"	beqzl	%2, 1b				\n"
		"	and	%2, %0, %3			\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	" __LL	"%0, %1	# test_and_clear_bit	\n"
			"	" __EXT "%2, %0, %3, 1			\n"
			"	" __INS	"%0, $0, %3, 1			\n"
			"	" __SC	"%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "ir" (bit)
			: "memory");
		} while (unlikely(!temp));
#endif
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL	"%0, %1	# test_and_clear_bit	\n"
			"	or	%2, %0, %3			\n"
			"	xor	%2, %3				\n"
			"	" __SC	"%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_clear_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;
	unsigned long res;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	arch=r4000			\n"
		"1:	" __LL	"%0, %1	# test_and_change_bit	\n"
		"	xor	%2, %0, %3			\n"
		"	" __SC	"%2, %1				\n"
		"	beqzl	%2, 1b				\n"
		"	and	%2, %0, %3			\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL	"%0, %1	# test_and_change_bit	\n"
			"	xor	%2, %0, %3			\n"
			"	" __SC	"\t%2, %1			\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));

		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_change_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}

#include <asm-generic/bitops/non-atomic.h>

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before the
 * memory operation. It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_llsc();
	__clear_bit(nr, addr);
}

/*
 * Return the bit position (0..BITS_PER_LONG - 1) of the most significant
 * 1 bit in a word.
 * Returns -1 if no 1 bit exists
 */
static inline unsigned long __fls(unsigned long word)
{
	int num;

	if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 31 - num;
	}

	if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	dclz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 63 - num;
	}

	num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
	if (!(word & (~0ul << 32))) {
		num -= 32;
		word <<= 32;
	}
#endif
	if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
		num -= 16;
		word <<= 16;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
		num -= 8;
		word <<= 8;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
		num -= 4;
		word <<= 4;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
		num -= 2;
		word <<= 2;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-1))))
		num -= 1;
	return num;
}

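/*
 * Worked examples (editor's addition, not part of the original header),
 * assuming a 64-bit unsigned long:
 *
 *	__fls(1)		== 0
 *	__fls(0x80000000UL)	== 31
 *	__fls(~0UL)		== 63
 *
 * For a zero word the clz/dclz paths above yield -1 (as documented), while
 * the pure-C fallback yields 0, so __fls(0) is best treated as undefined.
 */
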
/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __fls(word & -word);
}

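/*
 * Editor's note (not part of the original header): in two's complement,
 * word & -word isolates the lowest set bit, so the most significant set bit
 * of the result is also the least significant set bit of @word, e.g.:
 *
 *	word = 0b101100  ->  word & -word = 0b000100  ->  __ffs(word) == 2
 */
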
/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int x)
{
	int r;

	if (!__builtin_constant_p(x) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (x)
		: "r" (x));

		return 32 - x;
	}

	r = 32;
	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}

#include <asm-generic/bitops/fls64.h>

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */
static inline int ffs(int word)
{
	if (!word)
		return 0;

	return fls(word & -word);
}
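
/*
 * Editor's note (not part of the original header): for any non-zero x the
 * helpers above relate as
 *
 *	ffs(x) == __ffs(x) + 1		// 1-based vs. 0-based result
 *
 * e.g. ffs(0x10) == 5 and __ffs(0x10) == 4, while ffs(0) == 0 by definition.
 */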

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */