arch/s390/include/asm/bitops.h (v6.2)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 *    Copyright IBM Corp. 1999,2013
 *
 *    Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *
 * The description below was taken in large parts from the powerpc
 * bitops header file:
 * Within a word, bits are numbered LSB first.  Lots of places make
 * this assumption by directly testing bits with (val & (1<<nr)).
 * This can cause confusion for large (> 1 word) bitmaps on a
 * big-endian system because, unlike little endian, the number of each
 * bit depends on the word size.
 *
 * The bitop functions are defined to work on unsigned longs, so the bits
 * end up numbered:
 *   |63..............0|127............64|191...........128|255...........192|
 *
 * We also have special functions which work with an MSB0 encoding.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 *
 * The main difference is that bits 0-63 in the bit number field need to be
 * reversed compared to the LSB0 encoded bit fields. This can be achieved by
 * XOR with 0x3f.
 */
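
/*
 * [Editor's illustration, not part of the kernel header] A minimal
 * sketch of the MSB0 <-> LSB0 conversion described above: the word
 * index of a bit number is identical in both encodings, only the
 * in-word position is mirrored, which is exactly what the XOR with
 * 0x3f (BITS_PER_LONG - 1) does. The helper name is hypothetical.
 */
static inline unsigned long msb0_to_lsb0_sketch(unsigned long nr)
{
	/* e.g. 0 -> 63, 63 -> 0, 65 -> 126 (word 1, mirrored in-word) */
	return nr ^ (BITS_PER_LONG - 1);
}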

#ifndef _S390_BITOPS_H
#define _S390_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/typecheck.h>
#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>

#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)

static inline unsigned long *
__bitops_word(unsigned long nr, const volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
	return (unsigned long *)addr;
}

static inline unsigned long __bitops_mask(unsigned long nr)
{
	return 1UL << (nr & (BITS_PER_LONG - 1));
}
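
/*
 * [Editor's note, illustrative only] The address arithmetic in
 * __bitops_word() rounds the bit number down to its containing word:
 * nr ^ (nr & (BITS_PER_LONG - 1)) clears the low six bits, and >> 3
 * converts that bit offset to a byte offset. A sketch of the
 * equivalent, more obvious form (hypothetical helper name):
 */
static inline unsigned long *__bitops_word_sketch(unsigned long nr,
						  const volatile unsigned long *ptr)
{
	return (unsigned long *)ptr + (nr / BITS_PER_LONG);
}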

static __always_inline void arch_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_or(mask, (long *)addr);
}

static __always_inline void arch_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_and(~mask, (long *)addr);
}

static __always_inline void arch_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_xor(mask, (long *)addr);
}

static inline bool arch_test_and_set_bit(unsigned long nr,
					 volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_or_barrier(mask, (long *)addr);
	return old & mask;
}

static inline bool arch_test_and_clear_bit(unsigned long nr,
					   volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_and_barrier(~mask, (long *)addr);
	return old & mask;
}

static inline bool arch_test_and_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_xor_barrier(mask, (long *)addr);
	return old & mask;
}
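
/*
 * [Editor's sketch, not part of the kernel header] Typical use of the
 * atomic primitives above. Kernel code normally calls the generic
 * set_bit()/test_and_clear_bit() wrappers from <linux/bitops.h>, which
 * land here; the bitmap argument and bit number are made up for the
 * example.
 */
static inline void bitops_usage_sketch(unsigned long *flags)
{
	arch_set_bit(7, flags);			/* atomic OR, no barrier implied */
	if (arch_test_and_clear_bit(7, flags))	/* atomic, with barrier semantics */
		;				/* the bit was set and is now clear */
}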

static __always_inline void
arch___set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);

	*p |= mask;
}

static __always_inline void
arch___clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);

	*p &= ~mask;
}

static __always_inline void
arch___change_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);

	*p ^= mask;
}

static __always_inline bool
arch___test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *p;
	*p |= mask;
	return old & mask;
}

static __always_inline bool
arch___test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *p;
	*p &= ~mask;
	return old & mask;
}

static __always_inline bool
arch___test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *p;
	*p ^= mask;
	return old & mask;
}

#define arch_test_bit generic_test_bit
#define arch_test_bit_acquire generic_test_bit_acquire

static inline bool arch_test_and_set_bit_lock(unsigned long nr,
					      volatile unsigned long *ptr)
{
	if (arch_test_bit(nr, ptr))
		return true;
	return arch_test_and_set_bit(nr, ptr);
}
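
/*
 * [Editor's note] The plain read in arch_test_and_set_bit_lock() above
 * is an optimization, not a correctness requirement: if the bit is
 * already set the lock cannot be taken anyway, so the function reports
 * failure without issuing the interlocked update, avoiding exclusive
 * cache-line acquisition on contended paths.
 */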

static inline void arch_clear_bit_unlock(unsigned long nr,
					 volatile unsigned long *ptr)
{
	smp_mb__before_atomic();
	arch_clear_bit(nr, ptr);
}

static inline void arch___clear_bit_unlock(unsigned long nr,
					   volatile unsigned long *ptr)
{
	smp_mb();
	arch___clear_bit(nr, ptr);
}

#include <asm-generic/bitops/instrumented-atomic.h>
#include <asm-generic/bitops/instrumented-non-atomic.h>
#include <asm-generic/bitops/instrumented-lock.h>

/*
 * Functions which use MSB0 bit numbering.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 */
unsigned long find_first_bit_inv(const unsigned long *addr, unsigned long size);
unsigned long find_next_bit_inv(const unsigned long *addr, unsigned long size,
				unsigned long offset);

#define for_each_set_bit_inv(bit, addr, size)				\
	for ((bit) = find_first_bit_inv((addr), (size));		\
	     (bit) < (size);						\
	     (bit) = find_next_bit_inv((addr), (size), (bit) + 1))

static inline void set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline bool test_and_clear_bit_inv(unsigned long nr,
					  volatile unsigned long *ptr)
{
	return test_and_clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline bool test_bit_inv(unsigned long nr,
				const volatile unsigned long *ptr)
{
	return test_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}
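
/*
 * [Editor's sketch, illustrative only] With MSB0 numbering, bit 0 of
 * the first word is the 0x8000000000000000UL bit, so set_bit_inv(0, map)
 * delegates to set_bit(63, map). The bitmap argument is hypothetical.
 */
static inline void msb0_usage_sketch(unsigned long *map)
{
	set_bit_inv(0, map);		/* map[0] |= 1UL << 63 */
	if (test_bit_inv(0, map))
		clear_bit_inv(0, map);
}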

/**
 * __flogr - find leftmost one
 * @word: The word to search
 *
 * Returns the bit number of the most significant bit set,
 * where the most significant bit has bit number 0.
 * If no bit is set this function returns 64.
 */
static inline unsigned char __flogr(unsigned long word)
{
	if (__builtin_constant_p(word)) {
		unsigned long bit = 0;

		if (!word)
			return 64;
		if (!(word & 0xffffffff00000000UL)) {
			word <<= 32;
			bit += 32;
		}
		if (!(word & 0xffff000000000000UL)) {
			word <<= 16;
			bit += 16;
		}
		if (!(word & 0xff00000000000000UL)) {
			word <<= 8;
			bit += 8;
		}
		if (!(word & 0xf000000000000000UL)) {
			word <<= 4;
			bit += 4;
		}
		if (!(word & 0xc000000000000000UL)) {
			word <<= 2;
			bit += 2;
		}
		if (!(word & 0x8000000000000000UL)) {
			word <<= 1;
			bit += 1;
		}
		return bit;
	} else {
		union register_pair rp;

		rp.even = word;
		asm volatile(
			"       flogr   %[rp],%[rp]\n"
			: [rp] "+d" (rp.pair) : : "cc");
		return rp.even;
	}
}
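
/*
 * [Editor's note] A few sample values of __flogr(), for orientation:
 *   __flogr(0)                    == 64  (no bit set)
 *   __flogr(0x8000000000000000UL) == 0   (MSB set)
 *   __flogr(1)                    == 63  (only the LSB set)
 * The constant-folding branch and the FLOGR instruction agree on all
 * of these.
 */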

/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __flogr(-word & word) ^ (BITS_PER_LONG - 1);
}

/**
 * ffs - find first bit set
 * @word: the word to search
 *
 * This is defined the same way as the libc and
 * compiler builtin ffs routines (man ffs).
 */
static inline int ffs(int word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;
	unsigned int val = (unsigned int)word;

	return (1 + (__flogr(-val & val) ^ (BITS_PER_LONG - 1))) & mask;
}

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	return __flogr(word) ^ (BITS_PER_LONG - 1);
}

/**
 * fls64 - find last set bit in a 64-bit word
 * @word: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */
static inline int fls64(unsigned long word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;

	return (1 + (__flogr(word) ^ (BITS_PER_LONG - 1))) & mask;
}

/**
 * fls - find last (most-significant) bit set
 * @word: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int word)
{
	return fls64(word);
}
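
/*
 * [Editor's note] How the flogr-based helpers relate: -word & word
 * isolates the lowest set bit, so __ffs() asks __flogr() for the
 * position of that single remaining bit and mirrors it with the XOR.
 * Sample values, derivable from the definitions above:
 *   __ffs(0x10UL)    == 4	ffs(0) == 0	ffs(1) == 1
 *   __fls(1UL)       == 0	fls(0x80000000) == 32
 *   fls64(1UL << 63) == 64
 */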

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _S390_BITOPS_H */
arch/s390/include/asm/bitops.h (v5.14.15)
/* SPDX-License-Identifier: GPL-2.0 */
/*
 *    Copyright IBM Corp. 1999,2013
 *
 *    Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *
 * The description below was taken in large parts from the powerpc
 * bitops header file:
 * Within a word, bits are numbered LSB first.  Lots of places make
 * this assumption by directly testing bits with (val & (1<<nr)).
 * This can cause confusion for large (> 1 word) bitmaps on a
 * big-endian system because, unlike little endian, the number of each
 * bit depends on the word size.
 *
 * The bitop functions are defined to work on unsigned longs, so the bits
 * end up numbered:
 *   |63..............0|127............64|191...........128|255...........192|
 *
 * We also have special functions which work with an MSB0 encoding.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 *
 * The main difference is that bits 0-63 in the bit number field need to be
 * reversed compared to the LSB0 encoded bit fields. This can be achieved by
 * XOR with 0x3f.
 */

#ifndef _S390_BITOPS_H
#define _S390_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/typecheck.h>
#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>

#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)

static inline unsigned long *
__bitops_word(unsigned long nr, const volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
	return (unsigned long *)addr;
}

static inline unsigned long __bitops_mask(unsigned long nr)
{
	return 1UL << (nr & (BITS_PER_LONG - 1));
}

static __always_inline void arch_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_or(mask, (long *)addr);
}

static __always_inline void arch_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_and(~mask, (long *)addr);
}

static __always_inline void arch_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_xor(mask, (long *)addr);
}

static inline bool arch_test_and_set_bit(unsigned long nr,
					 volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_or_barrier(mask, (long *)addr);
	return old & mask;
}

static inline bool arch_test_and_clear_bit(unsigned long nr,
					   volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_and_barrier(~mask, (long *)addr);
	return old & mask;
}

static inline bool arch_test_and_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_xor_barrier(mask, (long *)addr);
	return old & mask;
}

static inline void arch___set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	*addr |= mask;
}

static inline void arch___clear_bit(unsigned long nr,
				    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	*addr &= ~mask;
}

static inline void arch___change_bit(unsigned long nr,
				     volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	*addr ^= mask;
}

static inline bool arch___test_and_set_bit(unsigned long nr,
					   volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *addr;
	*addr |= mask;
	return old & mask;
}

static inline bool arch___test_and_clear_bit(unsigned long nr,
					     volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *addr;
	*addr &= ~mask;
	return old & mask;
}

static inline bool arch___test_and_change_bit(unsigned long nr,
					      volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *addr;
	*addr ^= mask;
	return old & mask;
}

static inline bool arch_test_bit(unsigned long nr,
				 const volatile unsigned long *ptr)
{
	const volatile unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	return *addr & mask;
}

static inline bool arch_test_and_set_bit_lock(unsigned long nr,
					      volatile unsigned long *ptr)
{
	if (arch_test_bit(nr, ptr))
		return 1;
	return arch_test_and_set_bit(nr, ptr);
}

static inline void arch_clear_bit_unlock(unsigned long nr,
					 volatile unsigned long *ptr)
{
	smp_mb__before_atomic();
	arch_clear_bit(nr, ptr);
}

static inline void arch___clear_bit_unlock(unsigned long nr,
					   volatile unsigned long *ptr)
{
	smp_mb();
	arch___clear_bit(nr, ptr);
}

#include <asm-generic/bitops/instrumented-atomic.h>
#include <asm-generic/bitops/instrumented-non-atomic.h>
#include <asm-generic/bitops/instrumented-lock.h>

/*
 * Functions which use MSB0 bit numbering.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 */
unsigned long find_first_bit_inv(const unsigned long *addr, unsigned long size);
unsigned long find_next_bit_inv(const unsigned long *addr, unsigned long size,
				unsigned long offset);

#define for_each_set_bit_inv(bit, addr, size)				\
	for ((bit) = find_first_bit_inv((addr), (size));		\
	     (bit) < (size);						\
	     (bit) = find_next_bit_inv((addr), (size), (bit) + 1))

static inline void set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline bool test_and_clear_bit_inv(unsigned long nr,
					  volatile unsigned long *ptr)
{
	return test_and_clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline bool test_bit_inv(unsigned long nr,
				const volatile unsigned long *ptr)
{
	return test_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}
#ifdef CONFIG_HAVE_MARCH_Z9_109_FEATURES

/**
 * __flogr - find leftmost one
 * @word: The word to search
 *
 * Returns the bit number of the most significant bit set,
 * where the most significant bit has bit number 0.
 * If no bit is set this function returns 64.
 */
static inline unsigned char __flogr(unsigned long word)
{
	if (__builtin_constant_p(word)) {
		unsigned long bit = 0;

		if (!word)
			return 64;
		if (!(word & 0xffffffff00000000UL)) {
			word <<= 32;
			bit += 32;
		}
		if (!(word & 0xffff000000000000UL)) {
			word <<= 16;
			bit += 16;
		}
		if (!(word & 0xff00000000000000UL)) {
			word <<= 8;
			bit += 8;
		}
		if (!(word & 0xf000000000000000UL)) {
			word <<= 4;
			bit += 4;
		}
		if (!(word & 0xc000000000000000UL)) {
			word <<= 2;
			bit += 2;
		}
		if (!(word & 0x8000000000000000UL)) {
			word <<= 1;
			bit += 1;
		}
		return bit;
	} else {
		union register_pair rp;

		rp.even = word;
		asm volatile(
			"       flogr   %[rp],%[rp]\n"
			: [rp] "+d" (rp.pair) : : "cc");
		return rp.even;
	}
}

/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __flogr(-word & word) ^ (BITS_PER_LONG - 1);
}

/**
 * ffs - find first bit set
 * @word: the word to search
 *
 * This is defined the same way as the libc and
 * compiler builtin ffs routines (man ffs).
 */
static inline int ffs(int word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;
	unsigned int val = (unsigned int)word;

	return (1 + (__flogr(-val & val) ^ (BITS_PER_LONG - 1))) & mask;
}

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	return __flogr(word) ^ (BITS_PER_LONG - 1);
}

/**
 * fls64 - find last set bit in a 64-bit word
 * @word: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */
static inline int fls64(unsigned long word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;

	return (1 + (__flogr(word) ^ (BITS_PER_LONG - 1))) & mask;
}

/**
 * fls - find last (most-significant) bit set
 * @word: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int word)
{
	return fls64(word);
}

#else /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */
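
/*
 * [Editor's note] In this older version the flogr-based helpers are
 * only compiled when the build targets machine generations from
 * z9-109 on, where the FLOGR instruction is assumed to be available;
 * other configurations take the #else branch and use the generic
 * software implementations. Newer kernels (see the v6.2 listing
 * above) dropped the #ifdef after raising the minimum supported
 * machine generation.
 */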

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _S390_BITOPS_H */