/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2014 Felix Fietkau <nbd@nbd.name>
 * Copyright (C) 2004 - 2009 Ivo van Doorn <IvDoorn@gmail.com>
 */

#ifndef _LINUX_BITFIELD_H
#define _LINUX_BITFIELD_H

#include <linux/build_bug.h>
#include <asm/byteorder.h>

/*
 * Bitfield access macros
 *
 * The FIELD_{GET,PREP} macros take a shifted mask as their first parameter
 * and extract from it the base mask and the shift amount.
 * The mask must be a compile-time constant.
 *
 * Example:
 *
 *	#include <linux/bitfield.h>
 *	#include <linux/bits.h>
 *
 *	#define REG_FIELD_A	GENMASK(6, 0)
 *	#define REG_FIELD_B	BIT(7)
 *	#define REG_FIELD_C	GENMASK(15, 8)
 *	#define REG_FIELD_D	GENMASK(31, 16)
 *
 * Get:
 *	a = FIELD_GET(REG_FIELD_A, reg);
 *	b = FIELD_GET(REG_FIELD_B, reg);
 *
 * Set:
 *	reg = FIELD_PREP(REG_FIELD_A, 1) |
 *	      FIELD_PREP(REG_FIELD_B, 0) |
 *	      FIELD_PREP(REG_FIELD_C, c) |
 *	      FIELD_PREP(REG_FIELD_D, 0x40);
 *
 * Modify:
 *	reg &= ~REG_FIELD_C;
 *	reg |= FIELD_PREP(REG_FIELD_C, c);
 */

#define __bf_shf(x) (__builtin_ffsll(x) - 1)
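/*
 * For example, __bf_shf(GENMASK(15, 8)) evaluates to 8: __builtin_ffsll()
 * locates the lowest set bit of the shifted mask, which is the field's
 * shift amount.
 */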

#define __scalar_type_to_unsigned_cases(type) \
		unsigned type:	(unsigned type)0, \
		signed type:	(unsigned type)0

#define __unsigned_scalar_typeof(x) typeof( \
		_Generic((x), \
			char:	(unsigned char)0, \
			__scalar_type_to_unsigned_cases(char), \
			__scalar_type_to_unsigned_cases(short), \
			__scalar_type_to_unsigned_cases(int), \
			__scalar_type_to_unsigned_cases(long), \
			__scalar_type_to_unsigned_cases(long long), \
			default: (x)))

#define __bf_cast_unsigned(type, x)	((__unsigned_scalar_typeof(type))(x))
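/*
 * The helpers above use _Generic() to map a scalar expression to its
 * unsigned counterpart, so that the "type of reg too small for mask"
 * check in __BF_FIELD_CHECK() compares unsigned values and is not
 * confused by sign extension of signed mask or register types.
 */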

#define __BF_FIELD_CHECK(_mask, _reg, _val, _pfx) \
	({ \
		BUILD_BUG_ON_MSG(!__builtin_constant_p(_mask), \
				 _pfx "mask is not constant"); \
		BUILD_BUG_ON_MSG((_mask) == 0, _pfx "mask is zero"); \
		BUILD_BUG_ON_MSG(__builtin_constant_p(_val) ? \
				 ~((_mask) >> __bf_shf(_mask)) & \
					(0 + (_val)) : 0, \
				 _pfx "value too large for the field"); \
		BUILD_BUG_ON_MSG(__bf_cast_unsigned(_mask, _mask) > \
				 __bf_cast_unsigned(_reg, ~0ull), \
				 _pfx "type of reg too small for mask"); \
		__BUILD_BUG_ON_NOT_POWER_OF_2((_mask) + \
					      (1ULL << __bf_shf(_mask))); \
	})
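
/*
 * Illustrative examples (hypothetical variables) of what the checks above
 * reject at build time:
 *
 *	FIELD_PREP(non_constant_mask, 1)   -> "mask is not constant"
 *	FIELD_PREP(GENMASK(7, 4), 0x1f)    -> "value too large for the field"
 *	FIELD_GET(GENMASK(15, 8), some_u8) -> "type of reg too small for mask"
 *	FIELD_GET(0x50, reg)               -> non-contiguous mask, caught by
 *					      __BUILD_BUG_ON_NOT_POWER_OF_2()
 */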

/**
 * FIELD_MAX() - produce the maximum value representable by a field
 * @_mask: shifted mask defining the field's length and position
 *
 * FIELD_MAX() returns the maximum value that can be held in the field
 * specified by @_mask.
 */
#define FIELD_MAX(_mask) \
	({ \
		__BF_FIELD_CHECK(_mask, 0ULL, 0ULL, "FIELD_MAX: "); \
		(typeof(_mask))((_mask) >> __bf_shf(_mask)); \
	})
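
/* For example, FIELD_MAX(GENMASK(15, 8)) evaluates to 0xff. */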

/**
 * FIELD_FIT() - check if value fits in the field
 * @_mask: shifted mask defining the field's length and position
 * @_val: value to test against the field
 *
 * Return: true if @_val can fit inside @_mask, false if @_val is too big.
 */
#define FIELD_FIT(_mask, _val) \
	({ \
		__BF_FIELD_CHECK(_mask, 0ULL, 0ULL, "FIELD_FIT: "); \
		!((((typeof(_mask))_val) << __bf_shf(_mask)) & ~(_mask)); \
	})
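
/*
 * For example, FIELD_FIT(GENMASK(15, 8), 0xff) is true, while
 * FIELD_FIT(GENMASK(15, 8), 0x100) is false.
 */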

/**
 * FIELD_PREP() - prepare a bitfield element
 * @_mask: shifted mask defining the field's length and position
 * @_val: value to put in the field
 *
 * FIELD_PREP() masks and shifts up the value. The result should
 * be combined with other fields of the bitfield using logical OR.
 */
#define FIELD_PREP(_mask, _val) \
	({ \
		__BF_FIELD_CHECK(_mask, 0ULL, _val, "FIELD_PREP: "); \
		((typeof(_mask))(_val) << __bf_shf(_mask)) & (_mask); \
	})
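
/*
 * For example, FIELD_PREP(GENMASK(15, 8), 0x2a) evaluates to 0x2a00,
 * ready to be ORed with the other fields of the register value.
 */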

#define __BF_CHECK_POW2(n)	BUILD_BUG_ON_ZERO(((n) & ((n) - 1)) != 0)

/**
 * FIELD_PREP_CONST() - prepare a constant bitfield element
 * @_mask: shifted mask defining the field's length and position
 * @_val: value to put in the field
 *
 * FIELD_PREP_CONST() masks and shifts up the value. The result should
 * be combined with other fields of the bitfield using logical OR.
 *
 * Unlike FIELD_PREP(), this is a constant expression and can therefore
 * be used in initializers. Error checking is more limited in this
 * version, and non-constant masks cannot be used.
 */
#define FIELD_PREP_CONST(_mask, _val) \
	( \
		/* mask must be non-zero */ \
		BUILD_BUG_ON_ZERO((_mask) == 0) + \
		/* check if value fits */ \
		BUILD_BUG_ON_ZERO(~((_mask) >> __bf_shf(_mask)) & (_val)) + \
		/* check if mask is contiguous */ \
		__BF_CHECK_POW2((_mask) + (1ULL << __bf_shf(_mask))) + \
		/* and create the value */ \
		(((typeof(_mask))(_val) << __bf_shf(_mask)) & (_mask)) \
	)
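
/*
 * Illustrative use in a static initializer (hypothetical field layout):
 *
 *	#define CTRL_MODE	GENMASK(3, 0)
 *	static const u32 ctrl_init = FIELD_PREP_CONST(CTRL_MODE, 5);
 */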

/**
 * FIELD_GET() - extract a bitfield element
 * @_mask: shifted mask defining the field's length and position
 * @_reg: value of entire bitfield
 *
 * FIELD_GET() extracts the field specified by @_mask from the
 * bitfield passed in as @_reg by masking and shifting it down.
 */
#define FIELD_GET(_mask, _reg) \
	({ \
		__BF_FIELD_CHECK(_mask, _reg, 0U, "FIELD_GET: "); \
		(typeof(_mask))(((_reg) & (_mask)) >> __bf_shf(_mask)); \
	})
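
/* For example, FIELD_GET(GENMASK(15, 8), 0x1234) evaluates to 0x12. */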

extern void __compiletime_error("value doesn't fit into mask")
__field_overflow(void);
extern void __compiletime_error("bad bitfield mask")
__bad_mask(void);

/*
 * field_multiplier() returns the lowest set bit of a contiguous mask
 * (i.e. 1 << shift) and rejects non-contiguous masks at build time
 * via __bad_mask().
 */
static __always_inline u64 field_multiplier(u64 field)
{
	if ((field | (field - 1)) & ((field | (field - 1)) + 1))
		__bad_mask();
	return field & -field;
}

/* field_mask() strips the shift from a shifted mask, e.g. 0xff00 -> 0xff. */
static __always_inline u64 field_mask(u64 field)
{
	return field / field_multiplier(field);
}
#define field_max(field)	((typeof(field))field_mask(field))

/*
 * ____MAKE_OP() generates <type>_encode_bits(), <type>_replace_bits(),
 * <type>p_replace_bits() and <type>_get_bits() for one fixed-size,
 * fixed-endianness type.
 */
#define ____MAKE_OP(type,base,to,from) \
static __always_inline __##type type##_encode_bits(base v, base field) \
{ \
	if (__builtin_constant_p(v) && (v & ~field_mask(field))) \
		__field_overflow(); \
	return to((v & field_mask(field)) * field_multiplier(field)); \
} \
static __always_inline __##type type##_replace_bits(__##type old, \
					base val, base field) \
{ \
	return (old & ~to(field)) | type##_encode_bits(val, field); \
} \
static __always_inline void type##p_replace_bits(__##type *p, \
					base val, base field) \
{ \
	*p = (*p & ~to(field)) | type##_encode_bits(val, field); \
} \
static __always_inline base type##_get_bits(__##type v, base field) \
{ \
	return (from(v) & field) / field_multiplier(field); \
}
#define __MAKE_OP(size) \
	____MAKE_OP(le##size,u##size,cpu_to_le##size,le##size##_to_cpu) \
	____MAKE_OP(be##size,u##size,cpu_to_be##size,be##size##_to_cpu) \
	____MAKE_OP(u##size,u##size,,)
____MAKE_OP(u8,u8,,)
__MAKE_OP(16)
__MAKE_OP(32)
__MAKE_OP(64)
#undef __MAKE_OP
#undef ____MAKE_OP
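
/*
 * The macros above expand into helpers such as u32_encode_bits(),
 * le32_get_bits(), be16_replace_bits() and le64p_replace_bits().
 * Illustrative use (hypothetical descriptor field):
 *
 *	#define DESC_LEN	GENMASK(13, 0)
 *
 *	le32p_replace_bits(&desc->word0, 64, DESC_LEN);
 *	len = le32_get_bits(desc->word0, DESC_LEN);	// yields 64
 */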

#endif