Linux Audio

Check our new training course

Loading...
v5.14.15
  1
 
  2#include <linux/export.h>
  3#include <linux/generic-radix-tree.h>
  4#include <linux/gfp.h>
  5#include <linux/kmemleak.h>
  6
  7#define GENRADIX_ARY		(PAGE_SIZE / sizeof(struct genradix_node *))
  8#define GENRADIX_ARY_SHIFT	ilog2(GENRADIX_ARY)
  9
/*
 * A genradix node is exactly one page; whether the union's interior or leaf
 * member is live is determined by the node's depth in the tree (leaves are
 * at depth 0), not by anything stored in the node itself.
 */
struct genradix_node {
	union {
		/* Interior node: */
		struct genradix_node	*children[GENRADIX_ARY];

		/* Leaf: */
		u8			data[PAGE_SIZE];
	};
};
 19
 20static inline int genradix_depth_shift(unsigned depth)
 21{
 22	return PAGE_SHIFT + GENRADIX_ARY_SHIFT * depth;
 23}
 24
 25/*
 26 * Returns size (of data, in bytes) that a tree of a given depth holds:
 27 */
 28static inline size_t genradix_depth_size(unsigned depth)
 29{
 30	return 1UL << genradix_depth_shift(depth);
 31}
 32
 33/* depth that's needed for a genradix that can address up to ULONG_MAX: */
 34#define GENRADIX_MAX_DEPTH	\
 35	DIV_ROUND_UP(BITS_PER_LONG - PAGE_SHIFT, GENRADIX_ARY_SHIFT)
 36
 37#define GENRADIX_DEPTH_MASK				\
 38	((unsigned long) (roundup_pow_of_two(GENRADIX_MAX_DEPTH + 1) - 1))
 39
 40static inline unsigned genradix_root_to_depth(struct genradix_root *r)
 41{
 42	return (unsigned long) r & GENRADIX_DEPTH_MASK;
 43}
 44
 45static inline struct genradix_node *genradix_root_to_node(struct genradix_root *r)
 46{
 47	return (void *) ((unsigned long) r & ~GENRADIX_DEPTH_MASK);
 48}
 49
/*
 * Returns pointer to the specified byte @offset within @radix, or NULL if not
 * allocated
 */
void *__genradix_ptr(struct __genradix *radix, size_t offset)
{
	/* Depth and root node are packed into one word, read atomically: */
	struct genradix_root *r = READ_ONCE(radix->root);
	struct genradix_node *n = genradix_root_to_node(r);
	unsigned level		= genradix_root_to_depth(r);

	/* @offset beyond what a tree of this depth can address: */
	if (ilog2(offset) >= genradix_depth_shift(level))
		return NULL;

	while (1) {
		if (!n)
			return NULL;
		if (!level)
			break;

		level--;

		/* Descend into the child covering @offset, then strip the
		 * index bits that selected it: */
		n = n->children[offset >> genradix_depth_shift(level)];
		offset &= genradix_depth_size(level) - 1;
	}

	return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr);
 78
 79static inline struct genradix_node *genradix_alloc_node(gfp_t gfp_mask)
 80{
 81	struct genradix_node *node;
 82
 83	node = (struct genradix_node *)__get_free_page(gfp_mask|__GFP_ZERO);
 84
 85	/*
 86	 * We're using pages (not slab allocations) directly for kernel data
 87	 * structures, so we need to explicitly inform kmemleak of them in order
 88	 * to avoid false positive memory leak reports.
 89	 */
 90	kmemleak_alloc(node, PAGE_SIZE, 1, gfp_mask);
 91	return node;
 92}
 93
/* Release a node allocated by genradix_alloc_node(), untracking it first. */
static inline void genradix_free_node(struct genradix_node *node)
{
	kmemleak_free(node);
	free_page((unsigned long)node);
}
 99
/*
 * Returns pointer to the specified byte @offset within @radix, allocating it if
 * necessary - newly allocated slots are always zeroed out:
 */
void *__genradix_ptr_alloc(struct __genradix *radix, size_t offset,
			   gfp_t gfp_mask)
{
	struct genradix_root *v = READ_ONCE(radix->root);
	struct genradix_node *n, *new_node = NULL;
	unsigned level;

	/* Increase tree depth if necessary: */
	while (1) {
		struct genradix_root *r = v, *new_root;

		n	= genradix_root_to_node(r);
		level	= genradix_root_to_depth(r);

		/* Deep enough once @offset fits within the tree's span: */
		if (n && ilog2(offset) < genradix_depth_shift(level))
			break;

		if (!new_node) {
			new_node = genradix_alloc_node(gfp_mask);
			if (!new_node)
				return NULL;
		}

		/* New root: old tree becomes child 0, depth (packed into the
		 * root pointer's low bits) grows by one: */
		new_node->children[0] = n;
		new_root = ((struct genradix_root *)
			    ((unsigned long) new_node | (n ? level + 1 : 0)));

		/*
		 * Lockless publish: on success continue with our new root; on
		 * failure @v is the root another CPU installed and we retry,
		 * keeping @new_node as a spare for reuse.
		 */
		if ((v = cmpxchg_release(&radix->root, r, new_root)) == r) {
			v = new_root;
			new_node = NULL;
		}
	}

	/* Walk down, allocating interior/leaf nodes as needed: */
	while (level--) {
		struct genradix_node **p =
			&n->children[offset >> genradix_depth_shift(level)];
		offset &= genradix_depth_size(level) - 1;

		n = READ_ONCE(*p);
		if (!n) {
			if (!new_node) {
				new_node = genradix_alloc_node(gfp_mask);
				if (!new_node)
					return NULL;
			}

			/* Install our node unless someone beat us to it;
			 * either way @n ends up as the winner and @new_node
			 * holds any loser for reuse or freeing: */
			if (!(n = cmpxchg_release(p, NULL, new_node)))
				swap(n, new_node);
		}
	}

	/* Free the spare node if it was never installed: */
	if (new_node)
		genradix_free_node(new_node);

	return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr_alloc);
161
162void *__genradix_iter_peek(struct genradix_iter *iter,
163			   struct __genradix *radix,
164			   size_t objs_per_page)
165{
166	struct genradix_root *r;
167	struct genradix_node *n;
168	unsigned level, i;
 
 
 
 
169restart:
170	r = READ_ONCE(radix->root);
171	if (!r)
172		return NULL;
173
174	n	= genradix_root_to_node(r);
175	level	= genradix_root_to_depth(r);
176
177	if (ilog2(iter->offset) >= genradix_depth_shift(level))
178		return NULL;
179
180	while (level) {
181		level--;
182
183		i = (iter->offset >> genradix_depth_shift(level)) &
184			(GENRADIX_ARY - 1);
185
186		while (!n->children[i]) {
 
 
 
 
 
 
 
 
187			i++;
188			iter->offset = round_down(iter->offset +
189					   genradix_depth_size(level),
190					   genradix_depth_size(level));
191			iter->pos = (iter->offset >> PAGE_SHIFT) *
192				objs_per_page;
193			if (i == GENRADIX_ARY)
194				goto restart;
195		}
196
197		n = n->children[i];
198	}
199
200	return &n->data[iter->offset & (PAGE_SIZE - 1)];
201}
202EXPORT_SYMBOL(__genradix_iter_peek);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
203
204static void genradix_free_recurse(struct genradix_node *n, unsigned level)
205{
206	if (level) {
207		unsigned i;
208
209		for (i = 0; i < GENRADIX_ARY; i++)
210			if (n->children[i])
211				genradix_free_recurse(n->children[i], level - 1);
212	}
213
214	genradix_free_node(n);
215}
216
217int __genradix_prealloc(struct __genradix *radix, size_t size,
218			gfp_t gfp_mask)
219{
220	size_t offset;
221
222	for (offset = 0; offset < size; offset += PAGE_SIZE)
223		if (!__genradix_ptr_alloc(radix, offset, gfp_mask))
224			return -ENOMEM;
225
226	return 0;
227}
228EXPORT_SYMBOL(__genradix_prealloc);
229
230void __genradix_free(struct __genradix *radix)
231{
232	struct genradix_root *r = xchg(&radix->root, NULL);
233
234	genradix_free_recurse(genradix_root_to_node(r),
235			      genradix_root_to_depth(r));
236}
237EXPORT_SYMBOL(__genradix_free);
v6.8
  1
  2#include <linux/atomic.h>
  3#include <linux/export.h>
  4#include <linux/generic-radix-tree.h>
  5#include <linux/gfp.h>
  6#include <linux/kmemleak.h>
  7
  8#define GENRADIX_ARY		(PAGE_SIZE / sizeof(struct genradix_node *))
  9#define GENRADIX_ARY_SHIFT	ilog2(GENRADIX_ARY)
 10
/*
 * Each node occupies exactly one page.  Depth in the tree — not a stored
 * tag — decides which union member is live: interior nodes hold child
 * pointers, depth-0 nodes hold leaf data.
 */
struct genradix_node {
	union {
		/* Interior node: */
		struct genradix_node	*children[GENRADIX_ARY];

		/* Leaf: */
		u8			data[PAGE_SIZE];
	};
};
 20
 21static inline int genradix_depth_shift(unsigned depth)
 22{
 23	return PAGE_SHIFT + GENRADIX_ARY_SHIFT * depth;
 24}
 25
 26/*
 27 * Returns size (of data, in bytes) that a tree of a given depth holds:
 28 */
 29static inline size_t genradix_depth_size(unsigned depth)
 30{
 31	return 1UL << genradix_depth_shift(depth);
 32}
 33
 34/* depth that's needed for a genradix that can address up to ULONG_MAX: */
 35#define GENRADIX_MAX_DEPTH	\
 36	DIV_ROUND_UP(BITS_PER_LONG - PAGE_SHIFT, GENRADIX_ARY_SHIFT)
 37
 38#define GENRADIX_DEPTH_MASK				\
 39	((unsigned long) (roundup_pow_of_two(GENRADIX_MAX_DEPTH + 1) - 1))
 40
 41static inline unsigned genradix_root_to_depth(struct genradix_root *r)
 42{
 43	return (unsigned long) r & GENRADIX_DEPTH_MASK;
 44}
 45
 46static inline struct genradix_node *genradix_root_to_node(struct genradix_root *r)
 47{
 48	return (void *) ((unsigned long) r & ~GENRADIX_DEPTH_MASK);
 49}
 50
/*
 * Returns pointer to the specified byte @offset within @radix, or NULL if not
 * allocated
 */
void *__genradix_ptr(struct __genradix *radix, size_t offset)
{
	/* Root pointer and depth share one word, read atomically: */
	struct genradix_root *r = READ_ONCE(radix->root);
	struct genradix_node *n = genradix_root_to_node(r);
	unsigned level		= genradix_root_to_depth(r);

	/* @offset lies beyond what a tree of this depth can address: */
	if (ilog2(offset) >= genradix_depth_shift(level))
		return NULL;

	while (1) {
		if (!n)
			return NULL;
		if (!level)
			break;

		level--;

		/* Pick the child covering @offset, then drop the index bits
		 * that selected it: */
		n = n->children[offset >> genradix_depth_shift(level)];
		offset &= genradix_depth_size(level) - 1;
	}

	return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr);
 79
 80static inline struct genradix_node *genradix_alloc_node(gfp_t gfp_mask)
 81{
 82	struct genradix_node *node;
 83
 84	node = (struct genradix_node *)__get_free_page(gfp_mask|__GFP_ZERO);
 85
 86	/*
 87	 * We're using pages (not slab allocations) directly for kernel data
 88	 * structures, so we need to explicitly inform kmemleak of them in order
 89	 * to avoid false positive memory leak reports.
 90	 */
 91	kmemleak_alloc(node, PAGE_SIZE, 1, gfp_mask);
 92	return node;
 93}
 94
/* Free a node from genradix_alloc_node(), removing its kmemleak record. */
static inline void genradix_free_node(struct genradix_node *node)
{
	kmemleak_free(node);
	free_page((unsigned long)node);
}
100
/*
 * Returns pointer to the specified byte @offset within @radix, allocating it if
 * necessary - newly allocated slots are always zeroed out:
 */
void *__genradix_ptr_alloc(struct __genradix *radix, size_t offset,
			   gfp_t gfp_mask)
{
	struct genradix_root *v = READ_ONCE(radix->root);
	struct genradix_node *n, *new_node = NULL;
	unsigned level;

	/* Increase tree depth if necessary: */
	while (1) {
		struct genradix_root *r = v, *new_root;

		n	= genradix_root_to_node(r);
		level	= genradix_root_to_depth(r);

		/* Tree is deep enough once @offset fits within its span: */
		if (n && ilog2(offset) < genradix_depth_shift(level))
			break;

		if (!new_node) {
			new_node = genradix_alloc_node(gfp_mask);
			if (!new_node)
				return NULL;
		}

		/* New root: old tree becomes child 0, depth (packed into the
		 * low bits of the root pointer) grows by one: */
		new_node->children[0] = n;
		new_root = ((struct genradix_root *)
			    ((unsigned long) new_node | (n ? level + 1 : 0)));

		/*
		 * Lockless publish: on success continue with our new root; on
		 * failure @v holds the root another CPU installed and we
		 * retry, keeping @new_node as a spare.
		 */
		if ((v = cmpxchg_release(&radix->root, r, new_root)) == r) {
			v = new_root;
			new_node = NULL;
		}
	}

	/* Walk down, allocating interior/leaf nodes as needed: */
	while (level--) {
		struct genradix_node **p =
			&n->children[offset >> genradix_depth_shift(level)];
		offset &= genradix_depth_size(level) - 1;

		n = READ_ONCE(*p);
		if (!n) {
			if (!new_node) {
				new_node = genradix_alloc_node(gfp_mask);
				if (!new_node)
					return NULL;
			}

			/* Install our node unless another CPU won the race;
			 * either way @n is the winner and @new_node keeps the
			 * loser (if any) for reuse or freeing: */
			if (!(n = cmpxchg_release(p, NULL, new_node)))
				swap(n, new_node);
		}
	}

	/* Drop the spare node if it was never installed: */
	if (new_node)
		genradix_free_node(new_node);

	return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr_alloc);
162
/*
 * Return a pointer to the first allocated object at or after iter->offset,
 * advancing iter->offset/iter->pos past unallocated regions; NULL when
 * iteration is done.
 */
void *__genradix_iter_peek(struct genradix_iter *iter,
			   struct __genradix *radix,
			   size_t objs_per_page)
{
	struct genradix_root *r;
	struct genradix_node *n;
	unsigned level, i;

	/* SIZE_MAX marks an iterator that previously wrapped: exhausted. */
	if (iter->offset == SIZE_MAX)
		return NULL;

restart:
	r = READ_ONCE(radix->root);
	if (!r)
		return NULL;

	n	= genradix_root_to_node(r);
	level	= genradix_root_to_depth(r);

	/* Offset past everything the current tree depth can address: */
	if (ilog2(iter->offset) >= genradix_depth_shift(level))
		return NULL;

	while (level) {
		level--;

		i = (iter->offset >> genradix_depth_shift(level)) &
			(GENRADIX_ARY - 1);

		/* Skip unallocated subtrees, advancing the iterator: */
		while (!n->children[i]) {
			size_t objs_per_ptr = genradix_depth_size(level);

			/* Offset would wrap: mark the iterator exhausted. */
			if (iter->offset + objs_per_ptr < iter->offset) {
				iter->offset	= SIZE_MAX;
				iter->pos	= SIZE_MAX;
				return NULL;
			}

			i++;
			iter->offset = round_down(iter->offset + objs_per_ptr,
						  objs_per_ptr);

			iter->pos = (iter->offset >> PAGE_SHIFT) *
				objs_per_page;
			if (i == GENRADIX_ARY)
				goto restart;
		}

		n = n->children[i];
	}

	return &n->data[iter->offset & (PAGE_SIZE - 1)];
}
EXPORT_SYMBOL(__genradix_iter_peek);
215
/*
 * Like __genradix_iter_peek(), but walks backwards: returns a pointer to the
 * last allocated object at or before iter->offset, or NULL if none.
 *
 * @obj_size_plus_page_remainder: assumed to be the step from one object's
 * offset back to the previous object's across a page boundary (object size
 * plus a page's unused tail) — TODO confirm against the genradix header.
 */
void *__genradix_iter_peek_prev(struct genradix_iter *iter,
				struct __genradix *radix,
				size_t objs_per_page,
				size_t obj_size_plus_page_remainder)
{
	struct genradix_root *r;
	struct genradix_node *n;
	unsigned level, i;

	/* SIZE_MAX marks an exhausted iterator: */
	if (iter->offset == SIZE_MAX)
		return NULL;

restart:
	r = READ_ONCE(radix->root);
	if (!r)
		return NULL;

	n	= genradix_root_to_node(r);
	level	= genradix_root_to_depth(r);

	/* Offset beyond the tree: clamp to the last object it can hold. */
	if (ilog2(iter->offset) >= genradix_depth_shift(level)) {
		iter->offset = genradix_depth_size(level);
		iter->pos = (iter->offset >> PAGE_SHIFT) * objs_per_page;

		iter->offset -= obj_size_plus_page_remainder;
		iter->pos--;
	}

	while (level) {
		level--;

		i = (iter->offset >> genradix_depth_shift(level)) &
			(GENRADIX_ARY - 1);

		/* Skip unallocated subtrees, moving the iterator backwards: */
		while (!n->children[i]) {
			size_t objs_per_ptr = genradix_depth_size(level);

			iter->offset = round_down(iter->offset, objs_per_ptr);
			iter->pos = (iter->offset >> PAGE_SHIFT) * objs_per_page;

			/* Reached the start of the tree - nothing left: */
			if (!iter->offset)
				return NULL;

			/* Step to the last object of the previous subtree: */
			iter->offset -= obj_size_plus_page_remainder;
			iter->pos--;

			/* Left this node's range: re-walk from the root. */
			if (!i)
				goto restart;
			--i;
		}

		n = n->children[i];
	}

	return &n->data[iter->offset & (PAGE_SIZE - 1)];
}
EXPORT_SYMBOL(__genradix_iter_peek_prev);
273
274static void genradix_free_recurse(struct genradix_node *n, unsigned level)
275{
276	if (level) {
277		unsigned i;
278
279		for (i = 0; i < GENRADIX_ARY; i++)
280			if (n->children[i])
281				genradix_free_recurse(n->children[i], level - 1);
282	}
283
284	genradix_free_node(n);
285}
286
287int __genradix_prealloc(struct __genradix *radix, size_t size,
288			gfp_t gfp_mask)
289{
290	size_t offset;
291
292	for (offset = 0; offset < size; offset += PAGE_SIZE)
293		if (!__genradix_ptr_alloc(radix, offset, gfp_mask))
294			return -ENOMEM;
295
296	return 0;
297}
298EXPORT_SYMBOL(__genradix_prealloc);
299
300void __genradix_free(struct __genradix *radix)
301{
302	struct genradix_root *r = xchg(&radix->root, NULL);
303
304	genradix_free_recurse(genradix_root_to_node(r),
305			      genradix_root_to_depth(r));
306}
307EXPORT_SYMBOL(__genradix_free);