v6.2
/* SPDX-License-Identifier: GPL-2.0 */
/*
 *    Copyright IBM Corp. 1999,2013
 *
 *    Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *
 * The description below was taken in large parts from the powerpc
 * bitops header file:
 * Within a word, bits are numbered LSB first.  Lots of places make
 * this assumption by directly testing bits with (val & (1<<nr)).
 * This can cause confusion for large (> 1 word) bitmaps on a
 * big-endian system because, unlike little endian, the number of each
 * bit depends on the word size.
 *
 * The bitop functions are defined to work on unsigned longs, so the bits
 * end up numbered:
 *   |63..............0|127............64|191...........128|255...........192|
 *
 * We also have special functions which work with an MSB0 encoding.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 *
 * The main difference is that bit 0-63 in the bit number field needs to be
 * reversed compared to the LSB0 encoded bit fields. This can be achieved by
 * XOR with 0x3f.
 *
 */
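/*
 * Worked example (editorial, not part of the original header): within
 * each 64-bit word the two numberings are related by XOR with 0x3f (63):
 *
 *	LSB0 bit  0 <-> MSB0 bit 63	( 0 ^ 0x3f == 63)
 *	LSB0 bit 63 <-> MSB0 bit  0	(63 ^ 0x3f ==  0)
 *	LSB0 bit 64 <-> MSB0 bit 127	(64 ^ 0x3f == 127)
 *
 * Only the low six bits of the bit number flip; the word index
 * (nr / 64) is unchanged.
 */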

#ifndef _S390_BITOPS_H
#define _S390_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/typecheck.h>
#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>
#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)

static inline unsigned long *
__bitops_word(unsigned long nr, const volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
	return (unsigned long *)addr;
}
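/*
 * Worked example (editorial): nr ^ (nr & (BITS_PER_LONG - 1)) clears the
 * low six bits, i.e. rounds nr down to a multiple of 64, and ">> 3"
 * converts that bit count to bytes. For nr == 130:
 *
 *	130 & 63 == 2,  130 ^ 2 == 128,  128 >> 3 == 16
 *
 * so the result is ptr plus 16 bytes (two words), the word holding
 * bits 128..191.
 */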

static inline unsigned long __bitops_mask(unsigned long nr)
{
	return 1UL << (nr & (BITS_PER_LONG - 1));
}

static __always_inline void arch_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_or(mask, (long *)addr);
}

static __always_inline void arch_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_and(~mask, (long *)addr);
}

static __always_inline void arch_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);

	__atomic64_xor(mask, (long *)addr);
}

static inline bool arch_test_and_set_bit(unsigned long nr,
					 volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_or_barrier(mask, (long *)addr);
	return old & mask;
}

static inline bool arch_test_and_clear_bit(unsigned long nr,
					   volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_and_barrier(~mask, (long *)addr);
	return old & mask;
}

static inline bool arch_test_and_change_bit(unsigned long nr,
					    volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = __atomic64_xor_barrier(mask, (long *)addr);
	return old & mask;
}
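/*
 * Illustrative sketch (not part of the original header): the atomic
 * test-and-ops above back the generic test_and_set_bit() family pulled
 * in by the instrumented headers further down. A typical claim-a-flag
 * pattern; the function and parameter names here are hypothetical:
 */
static inline bool claim_flag_sketch(volatile unsigned long *flags,
				     unsigned long nr)
{
	/* true only for the one caller that flipped the bit 0 -> 1 */
	return !arch_test_and_set_bit(nr, flags);
}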

static __always_inline void
arch___set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);

	*p |= mask;
}

static __always_inline void
arch___clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);

	*p &= ~mask;
}

static __always_inline void
arch___change_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);

	*p ^= mask;
}

static __always_inline bool
arch___test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *p;
	*p |= mask;
	return old & mask;
}

static __always_inline bool
arch___test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *p;
	*p &= ~mask;
	return old & mask;
}

static __always_inline bool
arch___test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
{
	unsigned long *p = __bitops_word(nr, addr);
	unsigned long mask = __bitops_mask(nr);
	unsigned long old;

	old = *p;
	*p ^= mask;
	return old & mask;
}
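/*
 * Editorial note: the arch___*() variants above are plain
 * read-modify-write sequences with no atomicity, so callers typically
 * provide their own serialization, e.g. under a lock that already
 * protects the bitmap:
 *
 *	spin_lock(&lock);
 *	arch___set_bit(nr, bitmap);
 *	spin_unlock(&lock);
 */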

#define arch_test_bit generic_test_bit
#define arch_test_bit_acquire generic_test_bit_acquire

static inline bool arch_test_and_set_bit_lock(unsigned long nr,
					      volatile unsigned long *ptr)
{
	if (arch_test_bit(nr, ptr))
		return true;
	return arch_test_and_set_bit(nr, ptr);
}
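/*
 * Editorial note: the plain read of the bit first is a fast path; if
 * the lock bit is already set, the expensive interlocked update (and
 * the exclusive cache-line fetch it implies) is skipped entirely.
 */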

static inline void arch_clear_bit_unlock(unsigned long nr,
					 volatile unsigned long *ptr)
{
	smp_mb__before_atomic();
	arch_clear_bit(nr, ptr);
}

static inline void arch___clear_bit_unlock(unsigned long nr,
					   volatile unsigned long *ptr)
{
	smp_mb();
	arch___clear_bit(nr, ptr);
}

#include <asm-generic/bitops/instrumented-atomic.h>
#include <asm-generic/bitops/instrumented-non-atomic.h>
#include <asm-generic/bitops/instrumented-lock.h>

/*
 * Functions which use MSB0 bit numbering.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 */
unsigned long find_first_bit_inv(const unsigned long *addr, unsigned long size);
unsigned long find_next_bit_inv(const unsigned long *addr, unsigned long size,
				unsigned long offset);

#define for_each_set_bit_inv(bit, addr, size)				\
	for ((bit) = find_first_bit_inv((addr), (size));		\
	     (bit) < (size);						\
	     (bit) = find_next_bit_inv((addr), (size), (bit) + 1))

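/*
 * Usage sketch (illustrative; this helper is hypothetical and not part
 * of the original header): walk every set bit of an MSB0 bitmap.
 */
static inline unsigned long count_set_bits_inv_sketch(const unsigned long *map,
						      unsigned long size)
{
	unsigned long bit, count = 0;

	for_each_set_bit_inv(bit, map, size)
		count++;
	return count;
}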
static inline void set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline bool test_and_clear_bit_inv(unsigned long nr,
					  volatile unsigned long *ptr)
{
	return test_and_clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline bool test_bit_inv(unsigned long nr,
				const volatile unsigned long *ptr)
{
	return test_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}
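/*
 * Worked example (editorial): the _inv helpers only translate the bit
 * number, so test_bit_inv(0, ptr) tests MSB0 bit 0, i.e. LSB0 bit 63
 * (0 ^ 63), the 0x80 bit of the first byte in memory. MSB0 numbering
 * is what, e.g., the s390 facility lists use.
 */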

/**
 * __flogr - find leftmost one
 * @word - The word to search
 *
 * Returns the bit number of the most significant bit set,
 * where the most significant bit has bit number 0.
 * If no bit is set this function returns 64.
 */
static inline unsigned char __flogr(unsigned long word)
{
	if (__builtin_constant_p(word)) {
		unsigned long bit = 0;

		if (!word)
			return 64;
		if (!(word & 0xffffffff00000000UL)) {
			word <<= 32;
			bit += 32;
		}
		if (!(word & 0xffff000000000000UL)) {
			word <<= 16;
			bit += 16;
		}
		if (!(word & 0xff00000000000000UL)) {
			word <<= 8;
			bit += 8;
		}
		if (!(word & 0xf000000000000000UL)) {
			word <<= 4;
			bit += 4;
		}
		if (!(word & 0xc000000000000000UL)) {
			word <<= 2;
			bit += 2;
		}
		if (!(word & 0x8000000000000000UL)) {
			word <<= 1;
			bit += 1;
		}
		return bit;
	} else {
		union register_pair rp;

		rp.even = word;
		asm volatile(
			"       flogr   %[rp],%[rp]\n"
			: [rp] "+d" (rp.pair) : : "cc");
		return rp.even;
	}
}
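/*
 * Worked examples (editorial): __flogr(0) == 64, __flogr(1) == 63,
 * __flogr(1UL << 63) == 0. Both the constant-folded path and the flogr
 * instruction count bit positions from the MSB down.
 */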

/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __flogr(-word & word) ^ (BITS_PER_LONG - 1);
}
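/*
 * Worked example (editorial): for word == 0x10, "-word & word" isolates
 * the lowest set bit (0x10), __flogr(0x10) == 59, and 59 ^ 63 == 4,
 * the LSB0 number of that bit.
 */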

/**
 * ffs - find first bit set
 * @word: the word to search
 *
 * This is defined the same way as the libc and
 * compiler builtin ffs routines (man ffs).
 */
static inline int ffs(int word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;
	unsigned int val = (unsigned int)word;

	return (1 + (__flogr(-val & val) ^ (BITS_PER_LONG - 1))) & mask;
}
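/*
 * Worked example (editorial): ffs(0) == 0 without a branch, because
 * __flogr(0) is 64, 64 ^ 63 == 127, and (1 + 127) & 127 == 0. For
 * word == 1: __flogr(1) == 63, 63 ^ 63 == 0, giving the expected 1.
 */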

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	return __flogr(word) ^ (BITS_PER_LONG - 1);
}

/**
 * fls64 - find last set bit in a 64-bit word
 * @word: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */
static inline int fls64(unsigned long word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;

	return (1 + (__flogr(word) ^ (BITS_PER_LONG - 1))) & mask;
}
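/*
 * Worked examples (editorial): fls64(0) == 0 (the same masking trick as
 * in ffs()), fls64(1) == 1, fls64(1UL << 63) == 64.
 */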

/**
 * fls - find last (most-significant) bit set
 * @word: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int word)
{
	return fls64(word);
}

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _S390_BITOPS_H */
v4.6
 
/*
 *    Copyright IBM Corp. 1999,2013
 *
 *    Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *
 * The description below was taken in large parts from the powerpc
 * bitops header file:
 * Within a word, bits are numbered LSB first.  Lots of places make
 * this assumption by directly testing bits with (val & (1<<nr)).
 * This can cause confusion for large (> 1 word) bitmaps on a
 * big-endian system because, unlike little endian, the number of each
 * bit depends on the word size.
 *
 * The bitop functions are defined to work on unsigned longs, so the bits
 * end up numbered:
 *   |63..............0|127............64|191...........128|255...........192|
 *
 * There are a few little-endian macros used mostly for filesystem
 * bitmaps; these work on similar bit array layouts, but byte-oriented:
 *   |7...0|15...8|23...16|31...24|39...32|47...40|55...48|63...56|
 *
 * The main difference is that bit 3-5 in the bit number field needs to be
 * reversed compared to the big-endian bit fields. This can be achieved by
 * XOR with 0x38.
 *
 * We also have special functions which work with an MSB0 encoding.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 *
 * The main difference is that bit 0-63 in the bit number field needs to be
 * reversed compared to the LSB0 encoded bit fields. This can be achieved by
 * XOR with 0x3f.
 *
 */

#ifndef _S390_BITOPS_H
#define _S390_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/typecheck.h>
#include <linux/compiler.h>
#include <asm/barrier.h>

#define __BITOPS_NO_BARRIER	"\n"

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

#define __BITOPS_OR		"laog"
#define __BITOPS_AND		"lang"
#define __BITOPS_XOR		"laxg"
#define __BITOPS_BARRIER	"bcr	14,0\n"

#define __BITOPS_LOOP(__addr, __val, __op_string, __barrier)	\
({								\
	unsigned long __old;					\
								\
	typecheck(unsigned long *, (__addr));			\
	asm volatile(						\
		__op_string "	%0,%2,%1\n"			\
		__barrier					\
		: "=d" (__old),	"+Q" (*(__addr))		\
		: "d" (__val)					\
		: "cc", "memory");				\
	__old;							\
})
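/*
 * Editorial note: laog/lang/laxg are z196 interlocked-access facility
 * instructions; each performs the OR/AND/XOR atomically and returns
 * the old value in a single instruction, so no compare-and-swap loop
 * is needed. "bcr 14,0" is the fast-serialization form of the memory
 * barrier used for the *_BARRIER variants.
 */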

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define __BITOPS_OR		"ogr"
#define __BITOPS_AND		"ngr"
#define __BITOPS_XOR		"xgr"
#define __BITOPS_BARRIER	"\n"

#define __BITOPS_LOOP(__addr, __val, __op_string, __barrier)	\
({								\
	unsigned long __old, __new;				\
								\
	typecheck(unsigned long *, (__addr));			\
	asm volatile(						\
		"	lg	%0,%2\n"			\
		"0:	lgr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	csg	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new), "+Q" (*(__addr))\
		: "d" (__val)					\
		: "cc", "memory");				\
	__old;							\
})
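/*
 * Sketch of the pre-z196 loop above in C shape (editorial; OP stands
 * for the or/and/xor passed in, csg is the 64-bit compare-and-swap):
 *
 *	old = *addr;
 *	do {
 *		new = old OP val;
 *	} while (!compare_and_swap(addr, &old, new));	/ * csg * /
 *	return old;
 *
 * On a failed compare, csg reloads the current memory value into old
 * and the loop recomputes new, retrying until no other CPU modified
 * the word between the load and the swap.
 */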

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)

static inline unsigned long *
__bitops_word(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
	return (unsigned long *)addr;
}

static inline unsigned char *
__bitops_byte(unsigned long nr, volatile unsigned long *ptr)
{
	return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
}
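/*
 * Worked example (editorial): on this big-endian layout the byte
 * holding bit nr is found by flipping bits 3-5 of the bit number
 * (XOR with 56 == 0x38, matching the comment at the top of the file)
 * and converting to a byte offset: for nr == 0, (0 ^ 56) >> 3 == 7,
 * the least significant byte of the word; for nr == 63,
 * (63 ^ 56) >> 3 == 0, the most significant byte.
 */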

static inline void set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"oi	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc", "memory");
		return;
	}
#endif
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__BITOPS_LOOP(addr, mask, __BITOPS_OR, __BITOPS_NO_BARRIER);
}

static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"ni	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (~(1 << (nr & 7)))
			: "cc", "memory");
		return;
	}
#endif
	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	__BITOPS_LOOP(addr, mask, __BITOPS_AND, __BITOPS_NO_BARRIER);
}

static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"xi	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc", "memory");
		return;
	}
#endif
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__BITOPS_LOOP(addr, mask, __BITOPS_XOR, __BITOPS_NO_BARRIER);
}

static inline int
test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	old = __BITOPS_LOOP(addr, mask, __BITOPS_OR, __BITOPS_BARRIER);
	return (old & mask) != 0;
}

static inline int
test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	old = __BITOPS_LOOP(addr, mask, __BITOPS_AND, __BITOPS_BARRIER);
	return (old & ~mask) != 0;
}

static inline int
test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	old = __BITOPS_LOOP(addr, mask, __BITOPS_XOR, __BITOPS_BARRIER);
	return (old & mask) != 0;
}

static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);

	*addr |= 1 << (nr & 7);
}

static inline void
__clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);

	*addr &= ~(1 << (nr & 7));
}

static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);

	*addr ^= 1 << (nr & 7);
}

static inline int
__test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);
	unsigned char ch;

	ch = *addr;
	*addr |= 1 << (nr & 7);
	return (ch >> (nr & 7)) & 1;
}

static inline int
__test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);
	unsigned char ch;

	ch = *addr;
	*addr &= ~(1 << (nr & 7));
	return (ch >> (nr & 7)) & 1;
}

static inline int
__test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);
	unsigned char ch;

	ch = *addr;
	*addr ^= 1 << (nr & 7);
	return (ch >> (nr & 7)) & 1;
}

static inline int test_bit(unsigned long nr, const volatile unsigned long *ptr)
{
	const volatile unsigned char *addr;

	addr = ((const volatile unsigned char *)ptr);
	addr += (nr ^ (BITS_PER_LONG - 8)) >> 3;
	return (*addr >> (nr & 7)) & 1;
}

static inline int test_and_set_bit_lock(unsigned long nr,
					volatile unsigned long *ptr)
{
	if (test_bit(nr, ptr))
		return 1;
	return test_and_set_bit(nr, ptr);
}

static inline void clear_bit_unlock(unsigned long nr,
				    volatile unsigned long *ptr)
{
	smp_mb__before_atomic();
	clear_bit(nr, ptr);
}

static inline void __clear_bit_unlock(unsigned long nr,
				      volatile unsigned long *ptr)
{
	smp_mb();
	__clear_bit(nr, ptr);
}

/*
 * Functions which use MSB0 bit numbering.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 */
unsigned long find_first_bit_inv(const unsigned long *addr, unsigned long size);
unsigned long find_next_bit_inv(const unsigned long *addr, unsigned long size,
				unsigned long offset);

static inline void set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline int test_bit_inv(unsigned long nr,
			       const volatile unsigned long *ptr)
{
	return test_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

#ifdef CONFIG_HAVE_MARCH_Z9_109_FEATURES

/**
 * __flogr - find leftmost one
 * @word - The word to search
 *
 * Returns the bit number of the most significant bit set,
 * where the most significant bit has bit number 0.
 * If no bit is set this function returns 64.
 */
static inline unsigned char __flogr(unsigned long word)
{
	if (__builtin_constant_p(word)) {
		unsigned long bit = 0;

		if (!word)
			return 64;
		if (!(word & 0xffffffff00000000UL)) {
			word <<= 32;
			bit += 32;
		}
		if (!(word & 0xffff000000000000UL)) {
			word <<= 16;
			bit += 16;
		}
		if (!(word & 0xff00000000000000UL)) {
			word <<= 8;
			bit += 8;
		}
		if (!(word & 0xf000000000000000UL)) {
			word <<= 4;
			bit += 4;
		}
		if (!(word & 0xc000000000000000UL)) {
			word <<= 2;
			bit += 2;
		}
		if (!(word & 0x8000000000000000UL)) {
			word <<= 1;
			bit += 1;
		}
		return bit;
	} else {
		register unsigned long bit asm("4") = word;
		register unsigned long out asm("5");

		asm volatile(
			"       flogr   %[bit],%[bit]\n"
			: [bit] "+d" (bit), [out] "=d" (out) : : "cc");
		return bit;
	}
}
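/*
 * Editorial note: flogr operates on an even/odd register pair, with the
 * even register receiving the bit position. That is why this version
 * pins its operands to registers 4 and 5; later kernels express the
 * same constraint with a union register_pair, as in the v6.2 listing
 * above.
 */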

/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __flogr(-word & word) ^ (BITS_PER_LONG - 1);
}

/**
 * ffs - find first bit set
 * @word: the word to search
 *
 * This is defined the same way as the libc and
 * compiler builtin ffs routines (man ffs).
 */
static inline int ffs(int word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;
	unsigned int val = (unsigned int)word;

	return (1 + (__flogr(-val & val) ^ (BITS_PER_LONG - 1))) & mask;
}

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	return __flogr(word) ^ (BITS_PER_LONG - 1);
}

/**
 * fls64 - find last set bit in a 64-bit word
 * @word: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */
static inline int fls64(unsigned long word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;

	return (1 + (__flogr(word) ^ (BITS_PER_LONG - 1))) & mask;
}

/**
 * fls - find last (most-significant) bit set
 * @word: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int word)
{
	return fls64((unsigned int)word);
}

#else /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _S390_BITOPS_H */