/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _TOOLS_LINUX_BITOPS_H_
#define _TOOLS_LINUX_BITOPS_H_

#include <asm/types.h>
#include <limits.h>
#ifndef __WORDSIZE
#define __WORDSIZE (__SIZEOF_LONG__ * 8)
#endif

#ifndef BITS_PER_LONG
# define BITS_PER_LONG __WORDSIZE
#endif
#include <linux/bits.h>
#include <linux/compiler.h>

#define BITS_PER_TYPE(type)	(sizeof(type) * BITS_PER_BYTE)
#define BITS_TO_LONGS(nr)	DIV_ROUND_UP(nr, BITS_PER_TYPE(long))
#define BITS_TO_U64(nr)		DIV_ROUND_UP(nr, BITS_PER_TYPE(u64))
#define BITS_TO_U32(nr)		DIV_ROUND_UP(nr, BITS_PER_TYPE(u32))
#define BITS_TO_BYTES(nr)	DIV_ROUND_UP(nr, BITS_PER_TYPE(char))

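/*
 * Usage sketch (illustrative, not part of the upstream header): the
 * BITS_TO_*() helpers round a bit count up to whole storage units, which
 * is how callers size the backing store for a bitmap. The identifiers
 * below are made up for the example:
 *
 *	unsigned long map[BITS_TO_LONGS(100)];	// 2 longs on 64-bit, 4 on 32-bit
 *	__u32 words[BITS_TO_U32(100)];		// 4 32-bit words
 *	BITS_TO_BYTES(100)			// 13 bytes
 */
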
extern unsigned int __sw_hweight8(unsigned int w);
extern unsigned int __sw_hweight16(unsigned int w);
extern unsigned int __sw_hweight32(unsigned int w);
extern unsigned long __sw_hweight64(__u64 w);

/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope.
 *
 * XXX: this needs to be asm/bitops.h when we get to per-arch optimizations.
 */
#include <asm-generic/bitops.h>

#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

#define for_each_clear_bit(bit, addr, size) \
	for ((bit) = find_first_zero_bit((addr), (size));	\
	     (bit) < (size);					\
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))

/* same as for_each_set_bit() but start the search at the current value of @bit */
#define for_each_set_bit_from(bit, addr, size) \
	for ((bit) = find_next_bit((addr), (size), (bit));	\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

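/*
 * Illustrative sketch, not part of the upstream header: walk a bitmap with
 * for_each_set_bit() to count its set bits. The function name is made up
 * for this example; in-tree code would typically call bitmap_weight()
 * instead.
 */
static inline unsigned int example_count_set_bits(const unsigned long *map,
						  unsigned int nbits)
{
	unsigned int bit, count = 0;

	/* Visits only the set bits in map[0..nbits), in ascending order. */
	for_each_set_bit(bit, map, nbits)
		count++;
	return count;
}
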
static inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}

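/*
 * Reference values (illustrative, not from the original header):
 * hweight_long() is a population count, so hweight_long(0) == 0,
 * hweight_long(0xf0) == 4 and hweight_long(~0UL) == BITS_PER_LONG.
 */
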
static inline unsigned fls_long(unsigned long l)
{
	if (sizeof(l) == 4)
		return fls(l);
	return fls64(l);
}

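/*
 * Reference values (illustrative, not from the original header): fls()
 * returns the 1-based index of the most significant set bit and 0 for no
 * bits set, so fls_long(0) == 0, fls_long(1) == 1 and
 * fls_long(0x80000000UL) == 32.
 */
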
/**
 * rol32 - rotate a 32-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	return (word << shift) | (word >> ((-shift) & 31));
}
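
/*
 * Example value (illustrative, not from the original header):
 * rol32(0x80000001, 1) == 0x00000003. The (-shift) & 31 form keeps the
 * right-shift count within [0, 31], avoiding the undefined full-width
 * shift that a plain (32 - shift) would produce when shift == 0.
 */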

#endif /* _TOOLS_LINUX_BITOPS_H_ */