Loading...
1/*
2 * include/asm-xtensa/bitops.h
3 *
 * Atomic operations that C can't guarantee us. Useful for resource counting etc.
5 *
6 * This file is subject to the terms and conditions of the GNU General Public
7 * License. See the file "COPYING" in the main directory of this archive
8 * for more details.
9 *
10 * Copyright (C) 2001 - 2007 Tensilica Inc.
11 */
12
13#ifndef _XTENSA_BITOPS_H
14#define _XTENSA_BITOPS_H
15
16#ifndef _LINUX_BITOPS_H
17#error only <linux/bitops.h> can be included directly
18#endif
19
20#include <asm/processor.h>
21#include <asm/byteorder.h>
22#include <asm/barrier.h>
23
24#include <asm-generic/bitops/non-atomic.h>
25
26#if XCHAL_HAVE_NSA
27
/*
 * Count leading zeros using the Xtensa NSAU ("normalized shift amount,
 * unsigned") instruction.  Returns 32 for x == 0 (which is what makes
 * fls(0) below come out as 0), otherwise the number of leading zero
 * bits in x.
 */
static inline unsigned long __cntlz (unsigned long x)
{
	int lz;
	asm ("nsau %0, %1" : "=r" (lz) : "r" (x));
	return lz;
}
34
/*
 * ffz: 0-based index of the first (least-significant) zero bit in x.
 * The result is undefined when every bit of x is set.
 */
static inline int ffz(unsigned long x)
{
	unsigned long inverted = ~x;

	/* Isolate the lowest set bit of ~x, then convert its
	 * leading-zero count into a bit index. */
	return 31 - __cntlz(inverted & -inverted);
}
44
/*
 * __ffs: 0-based index of the least-significant set bit in x
 * (returns 0 for bit 0).  Undefined for x == 0.
 */
static inline unsigned long __ffs(unsigned long x)
{
	unsigned long lsb = x & -x;	/* keep only the lowest set bit */

	return 31 - __cntlz(lsb);
}
53
/*
 * ffs: 1-based index of the least-significant set bit, following the
 * libc/compiler-builtin convention (man ffs): ffs(0) == 0, so it
 * deliberately differs in spirit from ffz above.
 */
static inline int ffs(unsigned long x)
{
	unsigned long lsb = x & -x;	/* keep only the lowest set bit */

	return 32 - __cntlz(lsb);
}
64
/*
 * fls: 1-based position of the most-significant set bit in x.
 * fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls (unsigned int x)
{
	unsigned int lz = __cntlz(x);

	return 32 - lz;
}
74
/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Returns the 0-based bit index.  Undefined if no bit is set, so
 * callers must check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	unsigned long lz = __cntlz(word);

	return 31 - lz;
}
85#else
86
87/* Use the generic implementation if we don't have the nsa/nsau instructions. */
88
89# include <asm-generic/bitops/ffs.h>
90# include <asm-generic/bitops/__ffs.h>
91# include <asm-generic/bitops/ffz.h>
92# include <asm-generic/bitops/fls.h>
93# include <asm-generic/bitops/__fls.h>
94
95#endif
96
97#include <asm-generic/bitops/fls64.h>
98
99#if XCHAL_HAVE_EXCLUSIVE
100
/*
 * BIT_OP(op, insn, inv): emit arch_<op>_bit(bit, p), which atomically
 * applies <insn> (or/and/xor) with a one-bit mask to the 32-bit word
 * containing 'bit'; <inv> optionally inverts the mask (~ for clear).
 * Exclusive-access retry loop: l32ex loads the word, s32ex attempts
 * the store, getex fetches the store result into tmp, and beqz loops
 * back to label 1 (retry) while that result is zero.
 */
#define BIT_OP(op, insn, inv)						\
static inline void arch_##op##_bit(unsigned int bit, volatile unsigned long *p)\
{									\
	unsigned long tmp;						\
	unsigned long mask = 1UL << (bit & 31);				\
									\
	p += bit >> 5;							\
									\
	__asm__ __volatile__(						\
			"1:     l32ex   %[tmp], %[addr]\n"		\
			"       "insn"  %[tmp], %[tmp], %[mask]\n"	\
			"       s32ex   %[tmp], %[addr]\n"		\
			"       getex   %[tmp]\n"			\
			"       beqz    %[tmp], 1b\n"			\
			: [tmp] "=&a" (tmp)				\
			: [mask] "a" (inv mask), [addr] "a" (p)		\
			: "memory");					\
}
119
/*
 * TEST_AND_BIT_OP(op, insn, inv): emit arch_test_and_<op>_bit(bit, p).
 * Same exclusive-access retry loop as BIT_OP above, but the loaded old
 * word is preserved in 'value' so the bit's previous state can be
 * returned (nonzero iff it was set before the operation).
 */
#define TEST_AND_BIT_OP(op, insn, inv)					\
static inline int							\
arch_test_and_##op##_bit(unsigned int bit, volatile unsigned long *p)	\
{									\
	unsigned long tmp, value;					\
	unsigned long mask = 1UL << (bit & 31);				\
									\
	p += bit >> 5;							\
									\
	__asm__ __volatile__(						\
			"1:     l32ex   %[value], %[addr]\n"		\
			"       "insn"  %[tmp], %[value], %[mask]\n"	\
			"       s32ex   %[tmp], %[addr]\n"		\
			"       getex   %[tmp]\n"			\
			"       beqz    %[tmp], 1b\n"			\
			: [tmp] "=&a" (tmp), [value] "=&a" (value)	\
			: [mask] "a" (inv mask), [addr] "a" (p)		\
			: "memory");					\
									\
	return value & mask;						\
}
141
142#elif XCHAL_HAVE_S32C1I
143
/*
 * BIT_OP(op, insn, inv): s32c1i (compare-and-swap) variant of the
 * generator above.  The word is loaded, copied into the SCOMPARE1
 * special register (wsr), the new value computed with <insn>, and
 * stored with s32c1i; 'bne' retries from label 1 when the stored-back
 * value shows another CPU changed the word in the meantime.
 */
#define BIT_OP(op, insn, inv)						\
static inline void arch_##op##_bit(unsigned int bit, volatile unsigned long *p)\
{									\
	unsigned long tmp, value;					\
	unsigned long mask = 1UL << (bit & 31);				\
									\
	p += bit >> 5;							\
									\
	__asm__ __volatile__(						\
			"1:     l32i    %[value], %[mem]\n"		\
			"       wsr     %[value], scompare1\n"		\
			"       "insn"  %[tmp], %[value], %[mask]\n"	\
			"       s32c1i  %[tmp], %[mem]\n"		\
			"       bne     %[tmp], %[value], 1b\n"		\
			: [tmp] "=&a" (tmp), [value] "=&a" (value),	\
			  [mem] "+m" (*p)				\
			: [mask] "a" (inv mask)				\
			: "memory");					\
}
163
/*
 * TEST_AND_BIT_OP(op, insn, inv): s32c1i variant returning the bit's
 * previous state.  The loop only exits when tmp == value (the bne
 * falls through), i.e. when tmp holds the word's contents from before
 * the successful store, so 'tmp & mask' reports the old bit.
 */
#define TEST_AND_BIT_OP(op, insn, inv)					\
static inline int							\
arch_test_and_##op##_bit(unsigned int bit, volatile unsigned long *p)	\
{									\
	unsigned long tmp, value;					\
	unsigned long mask = 1UL << (bit & 31);				\
									\
	p += bit >> 5;							\
									\
	__asm__ __volatile__(						\
			"1:     l32i    %[value], %[mem]\n"		\
			"       wsr     %[value], scompare1\n"		\
			"       "insn"  %[tmp], %[value], %[mask]\n"	\
			"       s32c1i  %[tmp], %[mem]\n"		\
			"       bne     %[tmp], %[value], 1b\n"		\
			: [tmp] "=&a" (tmp), [value] "=&a" (value),	\
			  [mem] "+m" (*p)				\
			: [mask] "a" (inv mask)				\
			: "memory");					\
									\
	return tmp & mask;						\
}
186
#else

/*
 * Neither exclusive-access (l32ex/s32ex) nor s32c1i is available on
 * this core: define the generator macros as empty and pull in the
 * generic atomic bitops implementation instead.
 */
#define BIT_OP(op, insn, inv)
#define TEST_AND_BIT_OP(op, insn, inv)

#include <asm-generic/bitops/atomic.h>

#endif /* XCHAL_HAVE_S32C1I */
195
/*
 * Instantiate arch_{set,clear,change}_bit and their test_and_
 * counterparts from the generator macros above.  'clear' passes ~ so
 * the AND is done with the inverted mask.  The generators are undef'd
 * afterwards to keep them out of the global macro namespace.
 */
#define BIT_OPS(op, insn, inv)		\
	BIT_OP(op, insn, inv)		\
	TEST_AND_BIT_OP(op, insn, inv)

BIT_OPS(set, "or", )
BIT_OPS(clear, "and", ~)
BIT_OPS(change, "xor", )

#undef BIT_OPS
#undef BIT_OP
#undef TEST_AND_BIT_OP
207
208#include <asm-generic/bitops/instrumented-atomic.h>
209
210#include <asm-generic/bitops/le.h>
211
212#include <asm-generic/bitops/ext2-atomic-setbit.h>
213
214#include <asm-generic/bitops/hweight.h>
215#include <asm-generic/bitops/lock.h>
216#include <asm-generic/bitops/sched.h>
217
218#endif /* _XTENSA_BITOPS_H */
1/*
2 * include/asm-xtensa/bitops.h
3 *
 * Atomic operations that C can't guarantee us. Useful for resource counting etc.
5 *
6 * This file is subject to the terms and conditions of the GNU General Public
7 * License. See the file "COPYING" in the main directory of this archive
8 * for more details.
9 *
10 * Copyright (C) 2001 - 2007 Tensilica Inc.
11 */
12
13#ifndef _XTENSA_BITOPS_H
14#define _XTENSA_BITOPS_H
15
16#ifdef __KERNEL__
17
18#ifndef _LINUX_BITOPS_H
19#error only <linux/bitops.h> can be included directly
20#endif
21
22#include <asm/processor.h>
23#include <asm/byteorder.h>
24#include <asm/barrier.h>
25
26#include <asm-generic/bitops/non-atomic.h>
27
28#if XCHAL_HAVE_NSA
29
/*
 * Count leading zeros using the Xtensa NSAU ("normalized shift amount,
 * unsigned") instruction.  Returns 32 for x == 0 (which is what makes
 * fls(0) below come out as 0), otherwise the number of leading zero
 * bits in x.
 */
static inline unsigned long __cntlz (unsigned long x)
{
	int lz;
	asm ("nsau %0, %1" : "=r" (lz) : "r" (x));
	return lz;
}
36
/*
 * ffz: 0-based index of the first (least-significant) zero bit in x.
 * The result is undefined when every bit of x is set.
 */
static inline int ffz(unsigned long x)
{
	unsigned long inverted = ~x;

	/* Isolate the lowest set bit of ~x, then convert its
	 * leading-zero count into a bit index. */
	return 31 - __cntlz(inverted & -inverted);
}
46
/*
 * __ffs: 0-based index of the least-significant set bit in x
 * (returns 0 for bit 0).  Undefined for x == 0.
 */
static inline unsigned long __ffs(unsigned long x)
{
	unsigned long lsb = x & -x;	/* keep only the lowest set bit */

	return 31 - __cntlz(lsb);
}
55
/*
 * ffs: 1-based index of the least-significant set bit, following the
 * libc/compiler-builtin convention (man ffs): ffs(0) == 0, so it
 * deliberately differs in spirit from ffz above.
 */
static inline int ffs(unsigned long x)
{
	unsigned long lsb = x & -x;	/* keep only the lowest set bit */

	return 32 - __cntlz(lsb);
}
66
/*
 * fls: 1-based position of the most-significant set bit in x.
 * fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls (unsigned int x)
{
	unsigned int lz = __cntlz(x);

	return 32 - lz;
}
76
/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Returns the 0-based bit index.  Undefined if no bit is set, so
 * callers must check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	unsigned long lz = __cntlz(word);

	return 31 - lz;
}
87#else
88
89/* Use the generic implementation if we don't have the nsa/nsau instructions. */
90
91# include <asm-generic/bitops/ffs.h>
92# include <asm-generic/bitops/__ffs.h>
93# include <asm-generic/bitops/ffz.h>
94# include <asm-generic/bitops/fls.h>
95# include <asm-generic/bitops/__fls.h>
96
97#endif
98
99#include <asm-generic/bitops/fls64.h>
100
101#if XCHAL_HAVE_S32C1I
102
/*
 * set_bit: atomically set bit 'bit' in the bitmap at p via an s32c1i
 * compare-and-swap retry loop.  Operands: %0 = tmp (new value),
 * %1 = value (old value), %2 = mask, %3 = word address.
 */
static inline void set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit within its word */

	p += bit >> 5;				/* word holding the bit */

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"	/* load old word */
			"       wsr     %1, scompare1\n" /* expected value */
			"       or      %0, %1, %2\n"	/* set the bit */
			"       s32c1i  %0, %3, 0\n"	/* conditional store */
			"       bne     %0, %1, 1b\n"	/* retry on race */
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");
}
120
/*
 * clear_bit: atomically clear bit 'bit' in the bitmap at p via an
 * s32c1i compare-and-swap retry loop; ANDs with the inverted mask.
 */
static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit within its word */

	p += bit >> 5;				/* word holding the bit */

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"	/* load old word */
			"       wsr     %1, scompare1\n" /* expected value */
			"       and     %0, %1, %2\n"	/* clear via ~mask */
			"       s32c1i  %0, %3, 0\n"	/* conditional store */
			"       bne     %0, %1, 1b\n"	/* retry on race */
			: "=&a" (tmp), "=&a" (value)
			: "a" (~mask), "a" (p)
			: "memory");
}
138
/*
 * change_bit: atomically toggle bit 'bit' in the bitmap at p via an
 * s32c1i compare-and-swap retry loop (XOR with the mask).
 */
static inline void change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit within its word */

	p += bit >> 5;				/* word holding the bit */

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"	/* load old word */
			"       wsr     %1, scompare1\n" /* expected value */
			"       xor     %0, %1, %2\n"	/* toggle the bit */
			"       s32c1i  %0, %3, 0\n"	/* conditional store */
			"       bne     %0, %1, 1b\n"	/* retry on race */
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");
}
156
/*
 * test_and_set_bit: atomically set bit 'bit' at p and return its
 * previous state (nonzero iff it was already set).  On loop exit
 * tmp == value (the bne fell through), so tmp holds the word's
 * contents from before the successful store.
 */
static inline int
test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit within its word */

	p += bit >> 5;				/* word holding the bit */

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"	/* load old word */
			"       wsr     %1, scompare1\n" /* expected value */
			"       or      %0, %1, %2\n"	/* set the bit */
			"       s32c1i  %0, %3, 0\n"	/* conditional store */
			"       bne     %0, %1, 1b\n"	/* retry on race */
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	return tmp & mask;	/* old state of the bit */
}
177
/*
 * test_and_clear_bit: atomically clear bit 'bit' at p and return its
 * previous state (nonzero iff it was set).  On loop exit tmp holds
 * the word's contents from before the successful store.
 */
static inline int
test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit within its word */

	p += bit >> 5;				/* word holding the bit */

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"	/* load old word */
			"       wsr     %1, scompare1\n" /* expected value */
			"       and     %0, %1, %2\n"	/* clear via ~mask */
			"       s32c1i  %0, %3, 0\n"	/* conditional store */
			"       bne     %0, %1, 1b\n"	/* retry on race */
			: "=&a" (tmp), "=&a" (value)
			: "a" (~mask), "a" (p)
			: "memory");

	return tmp & mask;	/* old state of the bit */
}
198
/*
 * test_and_change_bit: atomically toggle bit 'bit' at p and return its
 * previous state (nonzero iff it was set).  On loop exit tmp holds
 * the word's contents from before the successful store.
 */
static inline int
test_and_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit within its word */

	p += bit >> 5;				/* word holding the bit */

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"	/* load old word */
			"       wsr     %1, scompare1\n" /* expected value */
			"       xor     %0, %1, %2\n"	/* toggle the bit */
			"       s32c1i  %0, %3, 0\n"	/* conditional store */
			"       bne     %0, %1, 1b\n"	/* retry on race */
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	return tmp & mask;	/* old state of the bit */
}
219
220#else
221
222#include <asm-generic/bitops/atomic.h>
223
224#endif /* XCHAL_HAVE_S32C1I */
225
226#include <asm-generic/bitops/find.h>
227#include <asm-generic/bitops/le.h>
228
229#include <asm-generic/bitops/ext2-atomic-setbit.h>
230
231#include <asm-generic/bitops/hweight.h>
232#include <asm-generic/bitops/lock.h>
233#include <asm-generic/bitops/sched.h>
234
235#endif /* __KERNEL__ */
236
237#endif /* _XTENSA_BITOPS_H */