/* ===== Linux v3.5.6 ===== */
#ifndef _S390_BITOPS_H
#define _S390_BITOPS_H

/*
 *  include/asm-s390/bitops.h
 *
 *  S390 version
 *    Copyright (C) 1999 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com)
 *
 *  Derived from "include/asm-i386/bitops.h"
 *    Copyright (C) 1992, Linus Torvalds
 *
 */

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>

/*
 * 32 bit bitops format:
 * bit 0 is the LSB of *addr; bit 31 is the MSB of *addr;
 * bit 32 is the LSB of *(addr+4). That combined with the
 * big endian byte order on S390 gives the following bit
 * order in memory:
 *    1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10 \
 *    0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00
 * after that follows the next long with bit numbers
 *    3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30
 *    2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20
 * The reason for this bit ordering is the fact that
 * in the architecture independent code bit operations
 * of the form "flags |= (1 << bitnr)" are used INTERMIXED
 * with operations of the form "set_bit(bitnr, flags)".
 *
 * 64 bit bitops format:
 * bit 0 is the LSB of *addr; bit 63 is the MSB of *addr;
 * bit 64 is the LSB of *(addr+8). That combined with the
 * big endian byte order on S390 gives the following bit
 * order in memory:
 *    3f 3e 3d 3c 3b 3a 39 38 37 36 35 34 33 32 31 30
 *    2f 2e 2d 2c 2b 2a 29 28 27 26 25 24 23 22 21 20
 *    1f 1e 1d 1c 1b 1a 19 18 17 16 15 14 13 12 11 10
 *    0f 0e 0d 0c 0b 0a 09 08 07 06 05 04 03 02 01 00
 * after that follows the next long with bit numbers
 *    7f 7e 7d 7c 7b 7a 79 78 77 76 75 74 73 72 71 70
 *    6f 6e 6d 6c 6b 6a 69 68 67 66 65 64 63 62 61 60
 *    5f 5e 5d 5c 5b 5a 59 58 57 56 55 54 53 52 51 50
 *    4f 4e 4d 4c 4b 4a 49 48 47 46 45 44 43 42 41 40
 * The reason for this bit ordering is the fact that
 * in the architecture independent code bit operations
 * of the form "flags |= (1 << bitnr)" are used INTERMIXED
 * with operations of the form "set_bit(bitnr, flags)".
 */
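
/*
 * Illustration, not part of the original header: with the formats
 * above, the generic expression and the bitop call below touch the
 * same bit:
 *
 *	flags |= 1UL << 0;	   the LSB of the unsigned long at &flags
 *	set_bit(0, &flags);	   the same bit, via the routines below
 *
 * On big endian s390 that LSB is stored in the *last* byte of the word
 * (byte offset 3 with 32 bit words, 7 with 64 bit words), which is why
 * the byte-wise routines below compute the byte address as
 * "ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3)".
 */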

/* bitmap tables from arch/s390/kernel/bitmap.c */
extern const char _oi_bitmap[];
extern const char _ni_bitmap[];
extern const char _zb_findmap[];
extern const char _sb_findmap[];

#ifndef CONFIG_64BIT

#define __BITOPS_ALIGN		3
#define __BITOPS_WORDSIZE	32
#define __BITOPS_OR		"or"
#define __BITOPS_AND		"nr"
#define __BITOPS_XOR		"xr"

#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)	\
	asm volatile(						\
		"	l	%0,%2\n"			\
		"0:	lr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	cs	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new),			\
		  "=Q" (*(unsigned long *) __addr)		\
		: "d" (__val), "Q" (*(unsigned long *) __addr)	\
		: "cc");

#else /* CONFIG_64BIT */

#define __BITOPS_ALIGN		7
#define __BITOPS_WORDSIZE	64
#define __BITOPS_OR		"ogr"
#define __BITOPS_AND		"ngr"
#define __BITOPS_XOR		"xgr"

#define __BITOPS_LOOP(__old, __new, __addr, __val, __op_string)	\
	asm volatile(						\
		"	lg	%0,%2\n"			\
		"0:	lgr	%1,%0\n"			\
		__op_string "	%1,%3\n"			\
		"	csg	%0,%1,%2\n"			\
		"	jl	0b"				\
		: "=&d" (__old), "=&d" (__new),			\
		  "=Q" (*(unsigned long *) __addr)		\
		: "d" (__val), "Q" (*(unsigned long *) __addr)	\
		: "cc");

#endif /* CONFIG_64BIT */
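
/*
 * Sketch, not part of the original header: what __BITOPS_LOOP computes,
 * written in plain C. The CS/CSG instruction is modelled here with a
 * GCC builtin; the real macro leaves the pre-update value in __old
 * instead of returning it, and __op_string selects or/and/xor.
 */
#if 0	/* illustration only */
static inline unsigned long __bitops_loop_or(unsigned long *addr,
					     unsigned long val)
{
	unsigned long old, new;

	do {
		old = *addr;		/* load the current word */
		new = old | val;	/* apply the operation */
	} while (!__sync_bool_compare_and_swap(addr, old, new));
	return old;			/* word as it was before the update */
}
#endif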

#define __BITOPS_WORDS(bits) (((bits)+__BITOPS_WORDSIZE-1)/__BITOPS_WORDSIZE)
#define __BITOPS_BARRIER() asm volatile("" : : : "memory")

#ifdef CONFIG_SMP
/*
 * SMP safe set_bit routine based on compare and swap (CS)
 */
static inline void set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make OR mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
}

/*
 * SMP safe clear_bit routine based on compare and swap (CS)
 */
static inline void clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make AND mask */
	mask = ~(1UL << (nr & (__BITOPS_WORDSIZE - 1)));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
}

/*
 * SMP safe change_bit routine based on compare and swap (CS)
 */
static inline void change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make XOR mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
}

/*
 * SMP safe test_and_set_bit routine based on compare and swap (CS)
 */
static inline int
test_and_set_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make OR/test mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_OR);
	__BITOPS_BARRIER();
	return (old & mask) != 0;
}

/*
 * SMP safe test_and_clear_bit routine based on compare and swap (CS)
 */
static inline int
test_and_clear_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make AND/test mask */
	mask = ~(1UL << (nr & (__BITOPS_WORDSIZE - 1)));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_AND);
	__BITOPS_BARRIER();
	return (old ^ new) != 0;
}

/*
 * SMP safe test_and_change_bit routine based on compare and swap (CS)
 */
static inline int
test_and_change_bit_cs(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr, old, new, mask;

	addr = (unsigned long) ptr;
	/* calculate address for CS */
	addr += (nr ^ (nr & (__BITOPS_WORDSIZE - 1))) >> 3;
	/* make XOR/test mask */
	mask = 1UL << (nr & (__BITOPS_WORDSIZE - 1));
	/* Do the atomic update. */
	__BITOPS_LOOP(old, new, addr, mask, __BITOPS_XOR);
	__BITOPS_BARRIER();
	return (old & mask) != 0;
}
#endif /* CONFIG_SMP */
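
/*
 * Usage sketch, not part of the original header: the test_and_*_cs
 * routines return the previous state of the bit, so a typical
 * "claim once" pattern is:
 *
 *	if (!test_and_set_bit_cs(5, &bitmap))
 *		first caller: bit 5 was clear and is now set;
 *	else
 *		bit 5 was already set by somebody else.
 */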

/*
 * fast, non-SMP set_bit routine
 */
static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	asm volatile(
		"	oc	%O0(1,%R0),%1"
		: "=Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7]) : "cc");
}

static inline void
__constant_set_bit(const unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	*(unsigned char *) addr |= 1 << (nr & 7);
}

#define set_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_set_bit((nr),(addr)) : \
 __set_bit((nr),(addr)) )
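
/*
 * Note, not part of the original header: the dispatch above happens at
 * compile time. set_bit_simple(3, addr) sees a literal bit number, so
 * __builtin_constant_p(3) is true and the call compiles to the plain
 * one-byte OR in __constant_set_bit(); set_bit_simple(n, addr) with a
 * runtime n goes through __set_bit() and the OC instruction indexed by
 * the _oi_bitmap table.
 */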

/*
 * fast, non-SMP clear_bit routine
 */
static inline void
__clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	asm volatile(
		"	nc	%O0(1,%R0),%1"
		: "=Q" (*(char *) addr) : "Q" (_ni_bitmap[nr & 7]) : "cc");
}

static inline void
__constant_clear_bit(const unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	*(unsigned char *) addr &= ~(1 << (nr & 7));
}

#define clear_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_clear_bit((nr),(addr)) : \
 __clear_bit((nr),(addr)) )

/*
 * fast, non-SMP change_bit routine
 */
static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	asm volatile(
		"	xc	%O0(1,%R0),%1"
		: "=Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7]) : "cc");
}

static inline void
__constant_change_bit(const unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = ((unsigned long) ptr) + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	*(unsigned char *) addr ^= 1 << (nr & 7);
}

#define change_bit_simple(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_change_bit((nr),(addr)) : \
 __change_bit((nr),(addr)) )

/*
 * fast, non-SMP test_and_set_bit routine
 */
static inline int
test_and_set_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(unsigned char *) addr;
	asm volatile(
		"	oc	%O0(1,%R0),%1"
		: "=Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7])
		: "cc", "memory");
	return (ch >> (nr & 7)) & 1;
}
#define __test_and_set_bit(X,Y)		test_and_set_bit_simple(X,Y)

/*
 * fast, non-SMP test_and_clear_bit routine
 */
static inline int
test_and_clear_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(unsigned char *) addr;
	asm volatile(
		"	nc	%O0(1,%R0),%1"
		: "=Q" (*(char *) addr) : "Q" (_ni_bitmap[nr & 7])
		: "cc", "memory");
	return (ch >> (nr & 7)) & 1;
}
#define __test_and_clear_bit(X,Y)	test_and_clear_bit_simple(X,Y)

/*
 * fast, non-SMP test_and_change_bit routine
 */
static inline int
test_and_change_bit_simple(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(unsigned char *) addr;
	asm volatile(
		"	xc	%O0(1,%R0),%1"
		: "=Q" (*(char *) addr) : "Q" (_oi_bitmap[nr & 7])
		: "cc", "memory");
	return (ch >> (nr & 7)) & 1;
}
#define __test_and_change_bit(X,Y)	test_and_change_bit_simple(X,Y)

#ifdef CONFIG_SMP
#define set_bit             set_bit_cs
#define clear_bit           clear_bit_cs
#define change_bit          change_bit_cs
#define test_and_set_bit    test_and_set_bit_cs
#define test_and_clear_bit  test_and_clear_bit_cs
#define test_and_change_bit test_and_change_bit_cs
#else
#define set_bit             set_bit_simple
#define clear_bit           clear_bit_simple
#define change_bit          change_bit_simple
#define test_and_set_bit    test_and_set_bit_simple
#define test_and_clear_bit  test_and_clear_bit_simple
#define test_and_change_bit test_and_change_bit_simple
#endif

/*
 * This routine doesn't need to be atomic.
 */
static inline int __test_bit(unsigned long nr, const volatile unsigned long *ptr)
{
	unsigned long addr;
	unsigned char ch;

	addr = (unsigned long) ptr + ((nr ^ (__BITOPS_WORDSIZE - 8)) >> 3);
	ch = *(volatile unsigned char *) addr;
	return (ch >> (nr & 7)) & 1;
}

static inline int
__constant_test_bit(unsigned long nr, const volatile unsigned long *addr)
{
	return (((volatile char *) addr)
		[(nr ^ (__BITOPS_WORDSIZE - 8)) >> 3] & (1 << (nr & 7))) != 0;
}

#define test_bit(nr,addr) \
(__builtin_constant_p((nr)) ? \
 __constant_test_bit((nr),(addr)) : \
 __test_bit((nr),(addr)) )
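
/*
 * Worked example, not part of the original header: with
 * __BITOPS_WORDSIZE == 64, test_bit(9, p) reads
 *
 *	byte offset:  (9 ^ (64 - 8)) >> 3 == (9 ^ 56) >> 3 == 49 >> 3 == 6
 *	bit in byte:  9 & 7 == 1
 *
 * i.e. bit 1 of the seventh byte: bit 9 is the second-lowest bit of the
 * first big endian unsigned long, exactly as the bit ordering comment
 * at the top of this file describes.
 */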

/*
 * Optimized find bit helper functions.
 */

/**
 * __ffz_word_loop - find byte offset of first long != -1UL
 * @addr: pointer to array of unsigned long
 * @size: size of the array in bits
 */
static inline unsigned long __ffz_word_loop(const unsigned long *addr,
					    unsigned long size)
{
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long bytes = 0;

	asm volatile(
#ifndef CONFIG_64BIT
		"	ahi	%1,-1\n"
		"	sra	%1,5\n"
		"	jz	1f\n"
		"0:	c	%2,0(%0,%3)\n"
		"	jne	1f\n"
		"	la	%0,4(%0)\n"
		"	brct	%1,0b\n"
		"1:\n"
#else
		"	aghi	%1,-1\n"
		"	srag	%1,%1,6\n"
		"	jz	1f\n"
		"0:	cg	%2,0(%0,%3)\n"
		"	jne	1f\n"
		"	la	%0,8(%0)\n"
		"	brct	%1,0b\n"
		"1:\n"
#endif
		: "+&a" (bytes), "+&d" (size)
		: "d" (-1UL), "a" (addr), "m" (*(addrtype *) addr)
		: "cc");
	return bytes;
}

/**
 * __ffs_word_loop - find byte offset of first long != 0UL
 * @addr: pointer to array of unsigned long
 * @size: size of the array in bits
 */
static inline unsigned long __ffs_word_loop(const unsigned long *addr,
					    unsigned long size)
{
	typedef struct { long _[__BITOPS_WORDS(size)]; } addrtype;
	unsigned long bytes = 0;

	asm volatile(
#ifndef CONFIG_64BIT
		"	ahi	%1,-1\n"
		"	sra	%1,5\n"
		"	jz	1f\n"
		"0:	c	%2,0(%0,%3)\n"
		"	jne	1f\n"
		"	la	%0,4(%0)\n"
		"	brct	%1,0b\n"
		"1:\n"
#else
		"	aghi	%1,-1\n"
		"	srag	%1,%1,6\n"
		"	jz	1f\n"
		"0:	cg	%2,0(%0,%3)\n"
		"	jne	1f\n"
		"	la	%0,8(%0)\n"
		"	brct	%1,0b\n"
		"1:\n"
#endif
		: "+&a" (bytes), "+&a" (size)
		: "d" (0UL), "a" (addr), "m" (*(addrtype *) addr)
		: "cc");
	return bytes;
}

/**
 * __ffz_word - add number of the first unset bit
 * @nr: base value the bit number is added to
 * @word: the word that is searched for unset bits
 */
static inline unsigned long __ffz_word(unsigned long nr, unsigned long word)
{
#ifdef CONFIG_64BIT
	if ((word & 0xffffffff) == 0xffffffff) {
		word >>= 32;
		nr += 32;
	}
#endif
	if ((word & 0xffff) == 0xffff) {
		word >>= 16;
		nr += 16;
	}
	if ((word & 0xff) == 0xff) {
		word >>= 8;
		nr += 8;
	}
	return nr + _zb_findmap[(unsigned char) word];
}

/**
 * __ffs_word - add number of the first set bit
 * @nr: base value the bit number is added to
 * @word: the word that is searched for set bits
 */
static inline unsigned long __ffs_word(unsigned long nr, unsigned long word)
{
#ifdef CONFIG_64BIT
	if ((word & 0xffffffff) == 0) {
		word >>= 32;
		nr += 32;
	}
#endif
	if ((word & 0xffff) == 0) {
		word >>= 16;
		nr += 16;
	}
	if ((word & 0xff) == 0) {
		word >>= 8;
		nr += 8;
	}
	return nr + _sb_findmap[(unsigned char) word];
}
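
/*
 * Worked example, not part of the original header:
 * __ffs_word(0, 0x0000000000c00000UL) halves the search range step by
 * step:
 *
 *	low 32 bits nonzero          -> no step
 *	low 16 bits zero             -> word >>= 16, nr = 16
 *	low 8 bits (0xc0) nonzero    -> no step
 *	_sb_findmap[0xc0] == 6       -> result 16 + 6 == 22
 *
 * and bit 22 is indeed the least significant set bit.  __ffz_word does
 * the same on the complemented conditions, using _zb_findmap.
 */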

/**
 * __load_ulong_be - load big endian unsigned long
 * @p: pointer to array of unsigned long
 * @offset: byte offset of source value in the array
 */
static inline unsigned long __load_ulong_be(const unsigned long *p,
					    unsigned long offset)
{
	p = (unsigned long *)((unsigned long) p + offset);
	return *p;
}

/**
 * __load_ulong_le - load little endian unsigned long
 * @p: pointer to array of unsigned long
 * @offset: byte offset of source value in the array
 */
static inline unsigned long __load_ulong_le(const unsigned long *p,
					    unsigned long offset)
{
	unsigned long word;

	p = (unsigned long *)((unsigned long) p + offset);
#ifndef CONFIG_64BIT
	asm volatile(
		"	ic	%0,%O1(%R1)\n"
		"	icm	%0,2,%O1+1(%R1)\n"
		"	icm	%0,4,%O1+2(%R1)\n"
		"	icm	%0,8,%O1+3(%R1)"
		: "=&d" (word) : "Q" (*p) : "cc");
#else
	asm volatile(
		"	lrvg	%0,%1"
		: "=d" (word) : "m" (*p));
#endif
	return word;
}

/*
 * The various find bit functions.
 */

/*
 * ffz - find first zero in word.
 * @word: The word to search
 *
 * Undefined if no zero exists, so code should check against ~0UL first.
 */
static inline unsigned long ffz(unsigned long word)
{
	return __ffz_word(0, word);
}

/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __ffs_word(0, word);
}

/**
 * ffs - find first bit set
 * @x: the word to search
 *
 * This is defined the same way as the libc and compiler builtin
 * ffs routines, therefore differs in spirit from the above ffz
 * (man ffs).
 */
static inline int ffs(int x)
{
	if (!x)
		return 0;
	return __ffs_word(1, x);
}
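
/*
 * Comparison, not part of the original header, of the three search
 * flavours defined above:
 *
 *	__ffs(0x10) == 4	bit numbers start at 0; input must be != 0
 *	ffs(0x10)   == 5	libc convention, numbering starts at 1,
 *				and ffs(0) == 0
 *	ffz(0xff)   == 8	first zero bit; input must be != ~0UL
 */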

/**
 * find_first_zero_bit - find the first zero bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit number of the first zero bit, not the number of the byte
 * containing a bit.
 */
static inline unsigned long find_first_zero_bit(const unsigned long *addr,
						unsigned long size)
{
	unsigned long bytes, bits;

	if (!size)
		return 0;
	bytes = __ffz_word_loop(addr, size);
	bits = __ffz_word(bytes*8, __load_ulong_be(addr, bytes));
	return (bits < size) ? bits : size;
}
#define find_first_zero_bit find_first_zero_bit

/**
 * find_first_bit - find the first set bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit number of the first set bit, not the number of the byte
 * containing a bit.
 */
static inline unsigned long find_first_bit(const unsigned long *addr,
					   unsigned long size)
{
	unsigned long bytes, bits;

	if (!size)
		return 0;
	bytes = __ffs_word_loop(addr, size);
	bits = __ffs_word(bytes*8, __load_ulong_be(addr, bytes));
	return (bits < size) ? bits : size;
}
#define find_first_bit find_first_bit

/**
 * find_next_zero_bit - find the next zero bit in a memory region
 * @addr: The address to base the search on
 * @offset: The bit number to start searching at
 * @size: The maximum size to search
 */
static inline int find_next_zero_bit(const unsigned long *addr,
				     unsigned long size,
				     unsigned long offset)
{
	const unsigned long *p;
	unsigned long bit, set;

	if (offset >= size)
		return size;
	bit = offset & (__BITOPS_WORDSIZE - 1);
	offset -= bit;
	size -= offset;
	p = addr + offset / __BITOPS_WORDSIZE;
	if (bit) {
		/*
		 * __ffz_word returns __BITOPS_WORDSIZE
		 * if no zero bit is present in the word.
		 */
		set = __ffz_word(bit, *p >> bit);
		if (set >= size)
			return size + offset;
		if (set < __BITOPS_WORDSIZE)
			return set + offset;
		offset += __BITOPS_WORDSIZE;
		size -= __BITOPS_WORDSIZE;
		p++;
	}
	return offset + find_first_zero_bit(p, size);
}
#define find_next_zero_bit find_next_zero_bit

/**
 * find_next_bit - find the next set bit in a memory region
 * @addr: The address to base the search on
 * @offset: The bit number to start searching at
 * @size: The maximum size to search
 */
static inline int find_next_bit(const unsigned long *addr,
				unsigned long size,
				unsigned long offset)
{
	const unsigned long *p;
	unsigned long bit, set;

	if (offset >= size)
		return size;
	bit = offset & (__BITOPS_WORDSIZE - 1);
	offset -= bit;
	size -= offset;
	p = addr + offset / __BITOPS_WORDSIZE;
	if (bit) {
		/*
		 * __ffs_word returns __BITOPS_WORDSIZE
		 * if no set bit is present in the word.
		 */
		set = __ffs_word(0, *p & (~0UL << bit));
		if (set >= size)
			return size + offset;
		if (set < __BITOPS_WORDSIZE)
			return set + offset;
		offset += __BITOPS_WORDSIZE;
		size -= __BITOPS_WORDSIZE;
		p++;
	}
	return offset + find_first_bit(p, size);
}
#define find_next_bit find_next_bit

/*
 * Every architecture must define this function. It's the fastest
 * way of searching a 140-bit bitmap where the first 100 bits are
 * unlikely to be set. It's guaranteed that at least one of the 140
 * bits is cleared.
 */
static inline int sched_find_first_bit(unsigned long *b)
{
	return find_first_bit(b, 140);
}

#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>

#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>

/*
 * ATTENTION: Intel byte ordering convention for ext2 and minix!
 * bit 0 is the LSB of addr; bit 31 is the MSB of addr;
 * bit 32 is the LSB of (addr+4).
 * That combined with the little endian byte order of Intel gives the
 * following bit order in memory:
 *    07 06 05 04 03 02 01 00 15 14 13 12 11 10 09 08 \
 *    23 22 21 20 19 18 17 16 31 30 29 28 27 26 25 24
 */
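
/*
 * Note, not part of the original header: in this little endian
 * numbering, bit 8 lives in the second byte of the buffer (byte
 * offset 1, bit 0), just as an ext2 bitmap written on an Intel machine
 * lays it out.  The _le helpers below therefore byte-swap each word
 * with __load_ulong_le() before reusing the ffz/ffs word helpers.
 */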

static inline int find_first_zero_bit_le(void *vaddr, unsigned int size)
{
	unsigned long bytes, bits;

	if (!size)
		return 0;
	bytes = __ffz_word_loop(vaddr, size);
	bits = __ffz_word(bytes*8, __load_ulong_le(vaddr, bytes));
	return (bits < size) ? bits : size;
}
#define find_first_zero_bit_le find_first_zero_bit_le

static inline int find_next_zero_bit_le(void *vaddr, unsigned long size,
					unsigned long offset)
{
	unsigned long *addr = vaddr, *p;
	unsigned long bit, set;

	if (offset >= size)
		return size;
	bit = offset & (__BITOPS_WORDSIZE - 1);
	offset -= bit;
	size -= offset;
	p = addr + offset / __BITOPS_WORDSIZE;
	if (bit) {
		/*
		 * s390 version of ffz returns __BITOPS_WORDSIZE
		 * if no zero bit is present in the word.
		 */
		set = __ffz_word(bit, __load_ulong_le(p, 0) >> bit);
		if (set >= size)
			return size + offset;
		if (set < __BITOPS_WORDSIZE)
			return set + offset;
		offset += __BITOPS_WORDSIZE;
		size -= __BITOPS_WORDSIZE;
		p++;
	}
	return offset + find_first_zero_bit_le(p, size);
}
#define find_next_zero_bit_le find_next_zero_bit_le

static inline unsigned long find_first_bit_le(void *vaddr, unsigned long size)
{
	unsigned long bytes, bits;

	if (!size)
		return 0;
	bytes = __ffs_word_loop(vaddr, size);
	bits = __ffs_word(bytes*8, __load_ulong_le(vaddr, bytes));
	return (bits < size) ? bits : size;
}
#define find_first_bit_le find_first_bit_le

static inline int find_next_bit_le(void *vaddr, unsigned long size,
				   unsigned long offset)
{
	unsigned long *addr = vaddr, *p;
	unsigned long bit, set;

	if (offset >= size)
		return size;
	bit = offset & (__BITOPS_WORDSIZE - 1);
	offset -= bit;
	size -= offset;
	p = addr + offset / __BITOPS_WORDSIZE;
	if (bit) {
		/*
		 * s390 version of ffz returns __BITOPS_WORDSIZE
		 * if no zero bit is present in the word.
		 */
		set = __ffs_word(0, __load_ulong_le(p, 0) & (~0UL << bit));
		if (set >= size)
			return size + offset;
		if (set < __BITOPS_WORDSIZE)
			return set + offset;
		offset += __BITOPS_WORDSIZE;
		size -= __BITOPS_WORDSIZE;
		p++;
	}
	return offset + find_first_bit_le(p, size);
}
#define find_next_bit_le find_next_bit_le

#include <asm-generic/bitops/le.h>

#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _S390_BITOPS_H */
/* ===== Linux v4.17 ===== */
/* SPDX-License-Identifier: GPL-2.0 */
/*
 *    Copyright IBM Corp. 1999,2013
 *
 *    Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
 *
 * The description below was taken in large parts from the powerpc
 * bitops header file:
 * Within a word, bits are numbered LSB first.  Lots of places make
 * this assumption by directly testing bits with (val & (1<<nr)).
 * This can cause confusion for large (> 1 word) bitmaps on a
 * big-endian system because, unlike little endian, the number of each
 * bit depends on the word size.
 *
 * The bitop functions are defined to work on unsigned longs, so the bits
 * end up numbered:
 *   |63..............0|127............64|191...........128|255...........192|
 *
 * We also have special functions which work with an MSB0 encoding.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 *
 * The main difference is that bits 0-63 in the bit number field need to be
 * reversed compared to the LSB0 encoded bit fields. This can be achieved by
 * XOR with 0x3f.
 */
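
/*
 * Worked example, not part of the original header, of the XOR mapping
 * between the two encodings, within one 64-bit word:
 *
 *	LSB0 bit  0  <->  MSB0 bit 63	( 0 ^ 0x3f == 63)
 *	LSB0 bit 63  <->  MSB0 bit  0	(63 ^ 0x3f ==  0)
 *	LSB0 bit 42  <->  MSB0 bit 21	(42 ^ 0x3f == 21)
 *
 * This is exactly what the *_inv wrappers near the end of this file do
 * with "nr ^ (BITS_PER_LONG - 1)".
 */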

#ifndef _S390_BITOPS_H
#define _S390_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/typecheck.h>
#include <linux/compiler.h>
#include <asm/atomic_ops.h>
#include <asm/barrier.h>

#define __BITOPS_WORDS(bits) (((bits) + BITS_PER_LONG - 1) / BITS_PER_LONG)

static inline unsigned long *
__bitops_word(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long addr;

	addr = (unsigned long)ptr + ((nr ^ (nr & (BITS_PER_LONG - 1))) >> 3);
	return (unsigned long *)addr;
}

static inline unsigned char *
__bitops_byte(unsigned long nr, volatile unsigned long *ptr)
{
	return ((unsigned char *)ptr) + ((nr ^ (BITS_PER_LONG - 8)) >> 3);
}
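
/*
 * Worked example, not part of the original header, for
 * BITS_PER_LONG == 64 and nr == 70:
 *
 *	__bitops_word: (70 ^ (70 & 63)) >> 3 == (70 ^ 6) >> 3 == 8,
 *		       the second unsigned long of the bitmap;
 *	__bitops_byte: (70 ^ 56) >> 3 == 126 >> 3 == 15, the last
 *		       (big endian) byte of that second long, where
 *		       bit 70 & 7 == 6 lives.
 */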

static inline void set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"oi	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc", "memory");
		return;
	}
#endif
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__atomic64_or(mask, addr);
}

static inline void clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"ni	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (~(1 << (nr & 7)))
			: "cc", "memory");
		return;
	}
#endif
	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	__atomic64_and(mask, addr);
}

static inline void change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long mask;

#ifdef CONFIG_HAVE_MARCH_ZEC12_FEATURES
	if (__builtin_constant_p(nr)) {
		unsigned char *caddr = __bitops_byte(nr, ptr);

		asm volatile(
			"xi	%0,%b1\n"
			: "+Q" (*caddr)
			: "i" (1 << (nr & 7))
			: "cc", "memory");
		return;
	}
#endif
	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	__atomic64_xor(mask, addr);
}

static inline int
test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	old = __atomic64_or_barrier(mask, addr);
	return (old & mask) != 0;
}

static inline int
test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = ~(1UL << (nr & (BITS_PER_LONG - 1)));
	old = __atomic64_and_barrier(mask, addr);
	return (old & ~mask) != 0;
}

static inline int
test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned long *addr = __bitops_word(nr, ptr);
	unsigned long old, mask;

	mask = 1UL << (nr & (BITS_PER_LONG - 1));
	old = __atomic64_xor_barrier(mask, addr);
	return (old & mask) != 0;
}
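
/*
 * Usage sketch, not part of the original header: the atomic test_and_*
 * routines above use the *_barrier variants and therefore act as full
 * memory barriers, so they can serve as a simple claim flag (BIT_BUSY
 * is a hypothetical bit number, not defined here):
 *
 *	if (!test_and_set_bit(BIT_BUSY, &flags))
 *		we won the race: the bit was clear and is now set.
 */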

static inline void __set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);

	*addr |= 1 << (nr & 7);
}

static inline void
__clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);

	*addr &= ~(1 << (nr & 7));
}

static inline void __change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);

	*addr ^= 1 << (nr & 7);
}

static inline int
__test_and_set_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);
	unsigned char ch;

	ch = *addr;
	*addr |= 1 << (nr & 7);
	return (ch >> (nr & 7)) & 1;
}

static inline int
__test_and_clear_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);
	unsigned char ch;

	ch = *addr;
	*addr &= ~(1 << (nr & 7));
	return (ch >> (nr & 7)) & 1;
}

static inline int
__test_and_change_bit(unsigned long nr, volatile unsigned long *ptr)
{
	unsigned char *addr = __bitops_byte(nr, ptr);
	unsigned char ch;

	ch = *addr;
	*addr ^= 1 << (nr & 7);
	return (ch >> (nr & 7)) & 1;
}

static inline int test_bit(unsigned long nr, const volatile unsigned long *ptr)
{
	const volatile unsigned char *addr;

	addr = ((const volatile unsigned char *)ptr);
	addr += (nr ^ (BITS_PER_LONG - 8)) >> 3;
	return (*addr >> (nr & 7)) & 1;
}

static inline int test_and_set_bit_lock(unsigned long nr,
					volatile unsigned long *ptr)
{
	if (test_bit(nr, ptr))
		return 1;
	return test_and_set_bit(nr, ptr);
}

static inline void clear_bit_unlock(unsigned long nr,
				    volatile unsigned long *ptr)
{
	smp_mb__before_atomic();
	clear_bit(nr, ptr);
}

static inline void __clear_bit_unlock(unsigned long nr,
				      volatile unsigned long *ptr)
{
	smp_mb();
	__clear_bit(nr, ptr);
}

/*
 * Functions which use MSB0 bit numbering.
 * The bits are numbered:
 *   |0..............63|64............127|128...........191|192...........255|
 */
unsigned long find_first_bit_inv(const unsigned long *addr, unsigned long size);
unsigned long find_next_bit_inv(const unsigned long *addr, unsigned long size,
				unsigned long offset);

#define for_each_set_bit_inv(bit, addr, size)				\
	for ((bit) = find_first_bit_inv((addr), (size));		\
	     (bit) < (size);						\
	     (bit) = find_next_bit_inv((addr), (size), (bit) + 1))
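
/*
 * Usage sketch, not part of the original header: iterating over an
 * MSB0-numbered bitmap, e.g. a 256-bit facility-style list
 * (handle_bit() is a hypothetical callback):
 *
 *	unsigned long bit;
 *
 *	for_each_set_bit_inv(bit, bitmap, 256)
 *		handle_bit(bit);
 *
 * Bit 0 here is the most significant bit of bitmap[0].
 */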

static inline void set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline int test_and_clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return test_and_clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __set_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __set_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline void __clear_bit_inv(unsigned long nr, volatile unsigned long *ptr)
{
	return __clear_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

static inline int test_bit_inv(unsigned long nr,
			       const volatile unsigned long *ptr)
{
	return test_bit(nr ^ (BITS_PER_LONG - 1), ptr);
}

#ifdef CONFIG_HAVE_MARCH_Z9_109_FEATURES

/**
 * __flogr - find leftmost one
 * @word: The word to search
 *
 * Returns the bit number of the most significant bit set,
 * where the most significant bit has bit number 0.
 * If no bit is set this function returns 64.
 */
static inline unsigned char __flogr(unsigned long word)
{
	if (__builtin_constant_p(word)) {
		unsigned long bit = 0;

		if (!word)
			return 64;
		if (!(word & 0xffffffff00000000UL)) {
			word <<= 32;
			bit += 32;
		}
		if (!(word & 0xffff000000000000UL)) {
			word <<= 16;
			bit += 16;
		}
		if (!(word & 0xff00000000000000UL)) {
			word <<= 8;
			bit += 8;
		}
		if (!(word & 0xf000000000000000UL)) {
			word <<= 4;
			bit += 4;
		}
		if (!(word & 0xc000000000000000UL)) {
			word <<= 2;
			bit += 2;
		}
		if (!(word & 0x8000000000000000UL)) {
			word <<= 1;
			bit += 1;
		}
		return bit;
	} else {
		register unsigned long bit asm("4") = word;
		register unsigned long out asm("5");

		asm volatile(
			"       flogr   %[bit],%[bit]\n"
			: [bit] "+d" (bit), [out] "=d" (out) : : "cc");
		return bit;
	}
}
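
/*
 * Worked examples, not part of the original header:
 *
 *	__flogr(0x8000000000000000UL) == 0	MSB set
 *	__flogr(1)                    == 63	only the LSB set
 *	__flogr(0)                    == 64	no bit set
 *
 * The helpers below just XOR the result with 63 (BITS_PER_LONG - 1)
 * to convert to the usual LSB0 bit numbering.
 */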

/**
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	return __flogr(-word & word) ^ (BITS_PER_LONG - 1);
}

/**
 * ffs - find first bit set
 * @word: the word to search
 *
 * This is defined the same way as the libc and
 * compiler builtin ffs routines (man ffs).
 */
static inline int ffs(int word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;
	unsigned int val = (unsigned int)word;

	return (1 + (__flogr(-val & val) ^ (BITS_PER_LONG - 1))) & mask;
}

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	return __flogr(word) ^ (BITS_PER_LONG - 1);
}

/**
 * fls64 - find last set bit in a 64-bit word
 * @word: the word to search
 *
 * This is defined in a similar way as the libc and compiler builtin
 * ffsll, but returns the position of the most significant set bit.
 *
 * fls64(value) returns 0 if value is 0 or the position of the last
 * set bit if value is nonzero. The last (most significant) bit is
 * at position 64.
 */
static inline int fls64(unsigned long word)
{
	unsigned long mask = 2 * BITS_PER_LONG - 1;

	return (1 + (__flogr(word) ^ (BITS_PER_LONG - 1))) & mask;
}

/**
 * fls - find last (most-significant) bit set
 * @word: the word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int word)
{
	return fls64((unsigned int)word);
}
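
/*
 * Worked examples, not part of the original header:
 *
 *	fls(0)           == 0
 *	fls(1)           == 1
 *	fls(0x80000000)  == 32
 *	fls64(1UL << 63) == 64
 *	__fls(0x10)      == 4
 */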

#else /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* CONFIG_HAVE_MARCH_Z9_109_FEATURES */

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* _S390_BITOPS_H */