1 /*
2 * include/asm-xtensa/bitops.h
3 *
 * Atomic operations that C can't guarantee us. Useful for resource counting etc.
5 *
6 * This file is subject to the terms and conditions of the GNU General Public
7 * License. See the file "COPYING" in the main directory of this archive
8 * for more details.
9 *
10 * Copyright (C) 2001 - 2007 Tensilica Inc.
11 */
12
13 #ifndef _XTENSA_BITOPS_H
14 #define _XTENSA_BITOPS_H
15
16 #ifdef __KERNEL__
17
18 #ifndef _LINUX_BITOPS_H
19 #error only <linux/bitops.h> can be included directly
20 #endif
21
22 #include <asm/processor.h>
23 #include <asm/byteorder.h>
24
25 #define smp_mb__before_clear_bit() smp_mb()
26 #define smp_mb__after_clear_bit() smp_mb()
27
28 #include <asm-generic/bitops/non-atomic.h>
29
30 #if XCHAL_HAVE_NSA
31
/*
 * __cntlz - count the leading zero bits of x.
 *
 * Uses the Xtensa NSAU ("normalization shift amount, unsigned")
 * instruction, available when XCHAL_HAVE_NSA is set.  Per the Xtensa
 * ISA, nsau returns the number of leading zeros, i.e. 0 when the MSB
 * is set and 32 when x == 0.
 */
static inline unsigned long __cntlz (unsigned long x)
{
	int lz;
	asm ("nsau %0, %1" : "=r" (lz) : "r" (x));
	return lz;
}
38
/*
 * ffz: Find first zero in word. Undefined if no zero exists.
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */

static inline int ffz(unsigned long x)
{
	/* x + 1 == -~x, so this isolates the lowest clear bit of x. */
	unsigned long lowest_zero = ~x & (x + 1);

	return 31 - __cntlz(lowest_zero);
}
48
/*
 * __ffs: Find first bit set in word. Return 0 for bit 0
 */

static inline int __ffs(unsigned long x)
{
	/* ~x + 1 == -x: keep only the least-significant set bit. */
	unsigned long lsb = x & (~x + 1);

	return 31 - __cntlz(lsb);
}
57
/*
 * ffs: Find first bit set in word. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */

static inline int ffs(unsigned long x)
{
	/* Isolate the lowest set bit (~x + 1 == -x), then 1-index it. */
	unsigned long lsb = x & (~x + 1);

	return 32 - __cntlz(lsb);
}
68
/*
 * fls: Find last (most-significant) bit set in word.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */

static inline int fls (unsigned int x)
{
	int leading_zeros = __cntlz(x);

	return 32 - leading_zeros;
}
78
/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	unsigned long zeros = __cntlz(word);

	return 31 - zeros;
}
89 #else
90
91 /* Use the generic implementation if we don't have the nsa/nsau instructions. */
92
93 # include <asm-generic/bitops/ffs.h>
94 # include <asm-generic/bitops/__ffs.h>
95 # include <asm-generic/bitops/ffz.h>
96 # include <asm-generic/bitops/fls.h>
97 # include <asm-generic/bitops/__fls.h>
98
99 #endif
100
101 #include <asm-generic/bitops/fls64.h>
102
103 #if XCHAL_HAVE_S32C1I
104
/*
 * set_bit - atomically set a bit in a bitmap
 * @bit: bit number (0-based; bit >> 5 selects the 32-bit word)
 * @p:   start of the bitmap
 *
 * S32C1I compare-and-swap retry loop: load the current word, arm
 * SCOMPARE1 with the observed value, then try to store the word with
 * the mask OR-ed in.  Per the Xtensa ISA, s32c1i only writes memory
 * when it still equals SCOMPARE1 and always returns the old memory
 * value in %0, so a mismatch (bne) means another CPU raced us and we
 * retry with the fresh value.
 */
static inline void set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within the word */

	p += bit >> 5;		/* advance to the word containing the bit */

	__asm__ __volatile__(
	"1: l32i %1, %3, 0\n"
	" wsr %1, scompare1\n"
	" or %0, %1, %2\n"
	" s32c1i %0, %3, 0\n"
	" bne %0, %1, 1b\n"
	: "=&a" (tmp), "=&a" (value)
	: "a" (mask), "a" (p)
	: "memory");
}
122
/*
 * clear_bit - atomically clear a bit in a bitmap
 * @bit: bit number (0-based; bit >> 5 selects the 32-bit word)
 * @p:   start of the bitmap
 *
 * Same S32C1I retry loop as set_bit, but ANDs the loaded word with
 * ~mask to clear the target bit; retries (bne) if another CPU
 * modified the word between the load and the conditional store.
 */
static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within the word */

	p += bit >> 5;		/* advance to the word containing the bit */

	__asm__ __volatile__(
	"1: l32i %1, %3, 0\n"
	" wsr %1, scompare1\n"
	" and %0, %1, %2\n"
	" s32c1i %0, %3, 0\n"
	" bne %0, %1, 1b\n"
	: "=&a" (tmp), "=&a" (value)
	: "a" (~mask), "a" (p)	/* note: inverted mask for the AND */
	: "memory");
}
140
/*
 * change_bit - atomically toggle a bit in a bitmap
 * @bit: bit number (0-based; bit >> 5 selects the 32-bit word)
 * @p:   start of the bitmap
 *
 * Same S32C1I retry loop as set_bit, but XORs the loaded word with
 * the mask to flip the target bit; retries (bne) if another CPU
 * modified the word between the load and the conditional store.
 */
static inline void change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within the word */

	p += bit >> 5;		/* advance to the word containing the bit */

	__asm__ __volatile__(
	"1: l32i %1, %3, 0\n"
	" wsr %1, scompare1\n"
	" xor %0, %1, %2\n"
	" s32c1i %0, %3, 0\n"
	" bne %0, %1, 1b\n"
	: "=&a" (tmp), "=&a" (value)
	: "a" (mask), "a" (p)
	: "memory");
}
158
/*
 * test_and_set_bit - atomically set a bit and return its old value
 * @bit: bit number (0-based; bit >> 5 selects the 32-bit word)
 * @p:   start of the bitmap
 *
 * S32C1I retry loop as in set_bit.  When the loop exits, %0 (tmp)
 * holds the word's value from before our OR (s32c1i returns the old
 * memory value per the Xtensa ISA), so tmp & mask is nonzero iff the
 * bit was already set.
 */
static inline int
test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within the word */

	p += bit >> 5;		/* advance to the word containing the bit */

	__asm__ __volatile__(
	"1: l32i %1, %3, 0\n"
	" wsr %1, scompare1\n"
	" or %0, %1, %2\n"
	" s32c1i %0, %3, 0\n"
	" bne %0, %1, 1b\n"
	: "=&a" (tmp), "=&a" (value)
	: "a" (mask), "a" (p)
	: "memory");

	/* Old value of the targeted bit (nonzero if it was set). */
	return tmp & mask;
}
179
/*
 * test_and_clear_bit - atomically clear a bit and return its old value
 * @bit: bit number (0-based; bit >> 5 selects the 32-bit word)
 * @p:   start of the bitmap
 *
 * S32C1I retry loop as in clear_bit (AND with ~mask).  On exit, tmp
 * holds the word's pre-update value, so tmp & mask is nonzero iff
 * the bit was set before we cleared it.
 */
static inline int
test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within the word */

	p += bit >> 5;		/* advance to the word containing the bit */

	__asm__ __volatile__(
	"1: l32i %1, %3, 0\n"
	" wsr %1, scompare1\n"
	" and %0, %1, %2\n"
	" s32c1i %0, %3, 0\n"
	" bne %0, %1, 1b\n"
	: "=&a" (tmp), "=&a" (value)
	: "a" (~mask), "a" (p)	/* note: inverted mask for the AND */
	: "memory");

	/* Old value of the targeted bit (nonzero if it was set). */
	return tmp & mask;
}
200
/*
 * test_and_change_bit - atomically toggle a bit and return its old value
 * @bit: bit number (0-based; bit >> 5 selects the 32-bit word)
 * @p:   start of the bitmap
 *
 * S32C1I retry loop as in change_bit (XOR with mask).  On exit, tmp
 * holds the word's pre-update value, so tmp & mask is nonzero iff
 * the bit was set before we flipped it.
 */
static inline int
test_and_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within the word */

	p += bit >> 5;		/* advance to the word containing the bit */

	__asm__ __volatile__(
	"1: l32i %1, %3, 0\n"
	" wsr %1, scompare1\n"
	" xor %0, %1, %2\n"
	" s32c1i %0, %3, 0\n"
	" bne %0, %1, 1b\n"
	: "=&a" (tmp), "=&a" (value)
	: "a" (mask), "a" (p)
	: "memory");

	/* Old value of the targeted bit (nonzero if it was set). */
	return tmp & mask;
}
221
222 #else
223
224 #include <asm-generic/bitops/atomic.h>
225
226 #endif /* XCHAL_HAVE_S32C1I */
227
228 #include <asm-generic/bitops/find.h>
229 #include <asm-generic/bitops/le.h>
230
231 #include <asm-generic/bitops/ext2-atomic-setbit.h>
232
233 #include <asm-generic/bitops/hweight.h>
234 #include <asm-generic/bitops/lock.h>
235 #include <asm-generic/bitops/sched.h>
236
237 #endif /* __KERNEL__ */
238
239 #endif /* _XTENSA_BITOPS_H */