/*
 * include/asm-xtensa/bitops.h
 *
 * Atomic operations that C can't guarantee us. Useful for resource counting etc.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2001 - 2007 Tensilica Inc.
 */

#ifndef _XTENSA_BITOPS_H
#define _XTENSA_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <asm/processor.h>
#include <asm/byteorder.h>
#include <asm/barrier.h>

#include <asm-generic/bitops/non-atomic.h>

#if XCHAL_HAVE_NSA

static inline unsigned long __cntlz (unsigned long x)
{
	int lz;
	asm ("nsau %0, %1" : "=r" (lz) : "r" (x));
	return lz;
}
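
/*
 * Per the Xtensa ISA, nsau returns the number of leading zero bits of
 * its operand, with nsau(0) == 32.  So __cntlz(0x80000000) == 0 and
 * __cntlz(1) == 31, which is what the 31-/32-based arithmetic below
 * relies on.
 */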

/*
 * ffz: Find first zero in word. Undefined if no zero exists.
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */

static inline int ffz(unsigned long x)
{
	return 31 - __cntlz(~x & -~x);
}
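
/*
 * Illustrative: ~x & -~x isolates the lowest set bit of ~x, i.e. the
 * lowest clear bit of x.  For x == 0x7 (0b0111), ~x & -~x == 0x8, so
 * ffz(x) == 31 - __cntlz(0x8) == 31 - 28 == 3.
 */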

/*
 * __ffs: Find first bit set in word. Return 0 for bit 0
 */

static inline unsigned long __ffs(unsigned long x)
{
	return 31 - __cntlz(x & -x);
}
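
/*
 * Illustrative: x & -x isolates the lowest set bit (two's-complement
 * negation inverts every bit above it).  E.g. __ffs(0xa) == 1, since
 * 0xa & -0xa == 0x2 and __cntlz(0x2) == 30.
 */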

/*
 * ffs: Find first bit set in word. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */

static inline int ffs(unsigned long x)
{
	return 32 - __cntlz(x & -x);
}
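
/*
 * The zero case falls out naturally here: 0 & -0 == 0 and
 * __cntlz(0) == 32, so ffs(0) == 32 - 32 == 0, matching the libc
 * convention.
 */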

/*
 * fls: Find last (most-significant) bit set in word.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */

static inline int fls (unsigned int x)
{
	return 32 - __cntlz(x);
}

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	return 31 - __cntlz(word);
}
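
/*
 * All of the above assume 32-bit words; Xtensa is a 32-bit
 * architecture, so unsigned long is 32 bits and the 31/32 constants
 * are correct.
 */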
#else

/* Use the generic implementation if we don't have the nsa/nsau instructions. */

# include <asm-generic/bitops/ffs.h>
# include <asm-generic/bitops/__ffs.h>
# include <asm-generic/bitops/ffz.h>
# include <asm-generic/bitops/fls.h>
# include <asm-generic/bitops/__fls.h>

#endif

#include <asm-generic/bitops/fls64.h>

#if XCHAL_HAVE_EXCLUSIVE

static inline void set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32ex   %0, %2\n"
			"       or      %0, %0, %1\n"
			"       s32ex   %0, %2\n"
			"       getex   %0\n"
			"       beqz    %0, 1b\n"
			: "=&a" (tmp)
			: "a" (mask), "a" (p)
			: "memory");
}
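
/*
 * The sequence above is the Xtensa exclusive-access retry loop: l32ex
 * loads the word and sets an exclusive monitor, s32ex attempts the
 * conditional store, and getex fetches the store's result into the
 * same register (as used here, nonzero means the store succeeded), so
 * beqz restarts the loop if the reservation was lost.  bit >> 5
 * selects the 32-bit word and bit & 31 the bit within it.
 */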

static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32ex   %0, %2\n"
			"       and     %0, %0, %1\n"
			"       s32ex   %0, %2\n"
			"       getex   %0\n"
			"       beqz    %0, 1b\n"
			: "=&a" (tmp)
			: "a" (~mask), "a" (p)
			: "memory");
}

static inline void change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32ex   %0, %2\n"
			"       xor     %0, %0, %1\n"
			"       s32ex   %0, %2\n"
			"       getex   %0\n"
			"       beqz    %0, 1b\n"
			: "=&a" (tmp)
			: "a" (mask), "a" (p)
			: "memory");
}

static inline int
test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32ex   %1, %3\n"
			"       or      %0, %1, %2\n"
			"       s32ex   %0, %3\n"
			"       getex   %0\n"
			"       beqz    %0, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	return value & mask;
}
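
/*
 * Sketch of the semantics (illustrative only): the loop atomically
 * performs { old = *p; *p = old | mask; } with value holding old at
 * loop exit, so the return is nonzero iff the bit was already set.
 */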

static inline int
test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32ex   %1, %3\n"
			"       and     %0, %1, %2\n"
			"       s32ex   %0, %3\n"
			"       getex   %0\n"
			"       beqz    %0, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (~mask), "a" (p)
			: "memory");

	return value & mask;
}

static inline int
test_and_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32ex   %1, %3\n"
			"       xor     %0, %1, %2\n"
			"       s32ex   %0, %3\n"
			"       getex   %0\n"
			"       beqz    %0, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	return value & mask;
}

#elif XCHAL_HAVE_S32C1I

static inline void set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       or      %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");
}
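
/*
 * s32c1i is Xtensa's compare-and-swap: wsr places the expected old
 * word in the SCOMPARE1 special register, s32c1i stores %0 only if
 * *p still equals SCOMPARE1, and in either case leaves the observed
 * memory value in %0, so bne retries if another CPU changed the word
 * between the l32i and the store.
 */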

static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       and     %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (~mask), "a" (p)
			: "memory");
}

static inline void change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       xor     %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");
}

static inline int
test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       or      %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	return tmp & mask;
}
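
/*
 * At loop exit tmp holds the word's previous value (the s32c1i store
 * succeeded, so the value it observed equals the value loaded), hence
 * tmp & mask is nonzero iff the bit was already set.
 */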

static inline int
test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       and     %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (~mask), "a" (p)
			: "memory");

	return tmp & mask;
}

static inline int
test_and_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       xor     %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	return tmp & mask;
}

#else

#include <asm-generic/bitops/atomic.h>

#endif /* XCHAL_HAVE_S32C1I */

#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/le.h>

#include <asm-generic/bitops/ext2-atomic-setbit.h>

#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>
#include <asm-generic/bitops/sched.h>

#endif /* _XTENSA_BITOPS_H */