#include <linux/export.h>
#include <linux/generic-radix-tree.h>
#include <linux/gfp.h>
#include <linux/kmemleak.h>

#define GENRADIX_ARY		(PAGE_SIZE / sizeof(struct genradix_node *))
#define GENRADIX_ARY_SHIFT	ilog2(GENRADIX_ARY)

struct genradix_node {
	union {
		/* Interior node: */
		struct genradix_node *children[GENRADIX_ARY];

		/* Leaf: */
		u8 data[PAGE_SIZE];
	};
};

static inline int genradix_depth_shift(unsigned depth)
{
	return PAGE_SHIFT + GENRADIX_ARY_SHIFT * depth;
}

/*
 * Returns size (of data, in bytes) that a tree of a given depth holds:
 */
static inline size_t genradix_depth_size(unsigned depth)
{
	return 1UL << genradix_depth_shift(depth);
}

/* depth that's needed for a genradix that can address up to ULONG_MAX: */
#define GENRADIX_MAX_DEPTH \
	DIV_ROUND_UP(BITS_PER_LONG - PAGE_SHIFT, GENRADIX_ARY_SHIFT)

#define GENRADIX_DEPTH_MASK \
	((unsigned long) (roundup_pow_of_two(GENRADIX_MAX_DEPTH + 1) - 1))

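/*
 * A genradix_root packs the root node pointer and the current tree depth
 * into a single word: nodes are page-sized and page-aligned, so the low
 * bits covered by GENRADIX_DEPTH_MASK are always zero in the pointer and
 * can hold the depth instead.  The helpers below unpack the two halves.
 */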
static inline unsigned genradix_root_to_depth(struct genradix_root *r)
{
	return (unsigned long) r & GENRADIX_DEPTH_MASK;
}

static inline struct genradix_node *genradix_root_to_node(struct genradix_root *r)
{
	return (void *) ((unsigned long) r & ~GENRADIX_DEPTH_MASK);
}

/*
 * Returns pointer to the specified byte @offset within @radix, or NULL if not
 * allocated
 */
void *__genradix_ptr(struct __genradix *radix, size_t offset)
{
	struct genradix_root *r = READ_ONCE(radix->root);
	struct genradix_node *n = genradix_root_to_node(r);
	unsigned level = genradix_root_to_depth(r);

	if (ilog2(offset) >= genradix_depth_shift(level))
		return NULL;

	while (1) {
		if (!n)
			return NULL;
		if (!level)
			break;

		level--;

		n = n->children[offset >> genradix_depth_shift(level)];
		offset &= genradix_depth_size(level) - 1;
	}

	return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr);

static inline struct genradix_node *genradix_alloc_node(gfp_t gfp_mask)
{
	struct genradix_node *node;

	node = (struct genradix_node *)__get_free_page(gfp_mask|__GFP_ZERO);

	/*
	 * We're using pages (not slab allocations) directly for kernel data
	 * structures, so we need to explicitly inform kmemleak of them in
	 * order to avoid false positive memory leak reports.
	 */
	kmemleak_alloc(node, PAGE_SIZE, 1, gfp_mask);
	return node;
}

static inline void genradix_free_node(struct genradix_node *node)
{
	kmemleak_free(node);
	free_page((unsigned long)node);
}

/*
 * Returns pointer to the specified byte @offset within @radix, allocating it if
 * necessary - newly allocated slots are always zeroed out:
 */
void *__genradix_ptr_alloc(struct __genradix *radix, size_t offset,
			   gfp_t gfp_mask)
{
	struct genradix_root *v = READ_ONCE(radix->root);
	struct genradix_node *n, *new_node = NULL;
	unsigned level;

	/* Increase tree depth if necessary: */
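	/*
	 * The tree is grown locklessly: allocate a new node, make the old
	 * root its first child, and publish it with cmpxchg_release().  If
	 * another thread updated the root first, retry against the value it
	 * installed; an unused spare node is freed at the end.
	 */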
	while (1) {
		struct genradix_root *r = v, *new_root;

		n = genradix_root_to_node(r);
		level = genradix_root_to_depth(r);

		if (n && ilog2(offset) < genradix_depth_shift(level))
			break;

		if (!new_node) {
			new_node = genradix_alloc_node(gfp_mask);
			if (!new_node)
				return NULL;
		}

		new_node->children[0] = n;
		new_root = ((struct genradix_root *)
			    ((unsigned long) new_node | (n ? level + 1 : 0)));

		if ((v = cmpxchg_release(&radix->root, r, new_root)) == r) {
			v = new_root;
			new_node = NULL;
		}
	}

	while (level--) {
		struct genradix_node **p =
			&n->children[offset >> genradix_depth_shift(level)];
		offset &= genradix_depth_size(level) - 1;

		n = READ_ONCE(*p);
		if (!n) {
			if (!new_node) {
				new_node = genradix_alloc_node(gfp_mask);
				if (!new_node)
					return NULL;
			}

			if (!(n = cmpxchg_release(p, NULL, new_node)))
				swap(n, new_node);
		}
	}

	if (new_node)
		genradix_free_node(new_node);

	return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr_alloc);

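/*
 * Returns a pointer to the first allocated object at or after the iterator's
 * current position, advancing @iter past unallocated regions, or NULL if
 * there are no further allocated objects.
 */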
void *__genradix_iter_peek(struct genradix_iter *iter,
			   struct __genradix *radix,
			   size_t objs_per_page)
{
	struct genradix_root *r;
	struct genradix_node *n;
	unsigned level, i;
restart:
	r = READ_ONCE(radix->root);
	if (!r)
		return NULL;

	n = genradix_root_to_node(r);
	level = genradix_root_to_depth(r);

	if (ilog2(iter->offset) >= genradix_depth_shift(level))
		return NULL;

	while (level) {
		level--;

		i = (iter->offset >> genradix_depth_shift(level)) &
			(GENRADIX_ARY - 1);

		while (!n->children[i]) {
			i++;
			iter->offset = round_down(iter->offset +
						  genradix_depth_size(level),
						  genradix_depth_size(level));
			iter->pos = (iter->offset >> PAGE_SHIFT) *
				objs_per_page;
			if (i == GENRADIX_ARY)
				goto restart;
		}

		n = n->children[i];
	}

	return &n->data[iter->offset & (PAGE_SIZE - 1)];
}
EXPORT_SYMBOL(__genradix_iter_peek);

static void genradix_free_recurse(struct genradix_node *n, unsigned level)
{
	if (level) {
		unsigned i;

		for (i = 0; i < GENRADIX_ARY; i++)
			if (n->children[i])
				genradix_free_recurse(n->children[i], level - 1);
	}

	genradix_free_node(n);
}

int __genradix_prealloc(struct __genradix *radix, size_t size,
			gfp_t gfp_mask)
{
	size_t offset;

	for (offset = 0; offset < size; offset += PAGE_SIZE)
		if (!__genradix_ptr_alloc(radix, offset, gfp_mask))
			return -ENOMEM;

	return 0;
}
EXPORT_SYMBOL(__genradix_prealloc);

void __genradix_free(struct __genradix *radix)
{
	struct genradix_root *r = xchg(&radix->root, NULL);

	genradix_free_recurse(genradix_root_to_node(r),
			      genradix_root_to_depth(r));
}
EXPORT_SYMBOL(__genradix_free);

#include <linux/atomic.h>
#include <linux/export.h>
#include <linux/generic-radix-tree.h>
#include <linux/gfp.h>
#include <linux/kmemleak.h>

#define GENRADIX_ARY		(GENRADIX_NODE_SIZE / sizeof(struct genradix_node *))
#define GENRADIX_ARY_SHIFT	ilog2(GENRADIX_ARY)

struct genradix_node {
	union {
		/* Interior node: */
		struct genradix_node *children[GENRADIX_ARY];

		/* Leaf: */
		u8 data[GENRADIX_NODE_SIZE];
	};
};

static inline int genradix_depth_shift(unsigned depth)
{
	return GENRADIX_NODE_SHIFT + GENRADIX_ARY_SHIFT * depth;
}

/*
 * Returns size (of data, in bytes) that a tree of a given depth holds:
 */
static inline size_t genradix_depth_size(unsigned depth)
{
	return 1UL << genradix_depth_shift(depth);
}

/* depth that's needed for a genradix that can address up to ULONG_MAX: */
#define GENRADIX_MAX_DEPTH \
	DIV_ROUND_UP(BITS_PER_LONG - GENRADIX_NODE_SHIFT, GENRADIX_ARY_SHIFT)

#define GENRADIX_DEPTH_MASK \
	((unsigned long) (roundup_pow_of_two(GENRADIX_MAX_DEPTH + 1) - 1))

static inline unsigned genradix_root_to_depth(struct genradix_root *r)
{
	return (unsigned long) r & GENRADIX_DEPTH_MASK;
}

static inline struct genradix_node *genradix_root_to_node(struct genradix_root *r)
{
	return (void *) ((unsigned long) r & ~GENRADIX_DEPTH_MASK);
}

/*
 * Returns pointer to the specified byte @offset within @radix, or NULL if not
 * allocated
 */
void *__genradix_ptr(struct __genradix *radix, size_t offset)
{
	struct genradix_root *r = READ_ONCE(radix->root);
	struct genradix_node *n = genradix_root_to_node(r);
	unsigned level = genradix_root_to_depth(r);

	if (ilog2(offset) >= genradix_depth_shift(level))
		return NULL;

	while (1) {
		if (!n)
			return NULL;
		if (!level)
			break;

		level--;

		n = n->children[offset >> genradix_depth_shift(level)];
		offset &= genradix_depth_size(level) - 1;
	}

	return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr);

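/*
 * Nodes are kmalloc allocations of GENRADIX_NODE_SIZE bytes; since that size
 * is a power of two, the slab allocator returns them naturally aligned, which
 * keeps the GENRADIX_DEPTH_MASK bits of the root pointer free for the packed
 * depth.
 */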
static inline struct genradix_node *genradix_alloc_node(gfp_t gfp_mask)
{
	return kzalloc(GENRADIX_NODE_SIZE, gfp_mask);
}

static inline void genradix_free_node(struct genradix_node *node)
{
	kfree(node);
}

/*
 * Returns pointer to the specified byte @offset within @radix, allocating it if
 * necessary - newly allocated slots are always zeroed out:
 */
void *__genradix_ptr_alloc(struct __genradix *radix, size_t offset,
			   gfp_t gfp_mask)
{
	struct genradix_root *v = READ_ONCE(radix->root);
	struct genradix_node *n, *new_node = NULL;
	unsigned level;

	/* Increase tree depth if necessary: */
	while (1) {
		struct genradix_root *r = v, *new_root;

		n = genradix_root_to_node(r);
		level = genradix_root_to_depth(r);

		if (n && ilog2(offset) < genradix_depth_shift(level))
			break;

		if (!new_node) {
			new_node = genradix_alloc_node(gfp_mask);
			if (!new_node)
				return NULL;
		}

		new_node->children[0] = n;
		new_root = ((struct genradix_root *)
			    ((unsigned long) new_node | (n ? level + 1 : 0)));

		if ((v = cmpxchg_release(&radix->root, r, new_root)) == r) {
			v = new_root;
			new_node = NULL;
		}
	}

	while (level--) {
		struct genradix_node **p =
			&n->children[offset >> genradix_depth_shift(level)];
		offset &= genradix_depth_size(level) - 1;

		n = READ_ONCE(*p);
		if (!n) {
			if (!new_node) {
				new_node = genradix_alloc_node(gfp_mask);
				if (!new_node)
					return NULL;
			}

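			/*
			 * Publish the new child with cmpxchg_release(); if we
			 * lose the race, descend into the node the winner
			 * installed and keep the spare for a deeper level (it
			 * is freed below if it goes unused).
			 */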
			if (!(n = cmpxchg_release(p, NULL, new_node)))
				swap(n, new_node);
		}
	}

	if (new_node)
		genradix_free_node(new_node);

	return &n->data[offset];
}
EXPORT_SYMBOL(__genradix_ptr_alloc);

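/*
 * Returns a pointer to the first allocated object at or after the iterator's
 * current position, advancing @iter past unallocated regions, or NULL if
 * there are none.  SIZE_MAX in iter->offset is the end-of-iteration sentinel,
 * set below when advancing the offset would overflow.
 */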
void *__genradix_iter_peek(struct genradix_iter *iter,
			   struct __genradix *radix,
			   size_t objs_per_page)
{
	struct genradix_root *r;
	struct genradix_node *n;
	unsigned level, i;

	if (iter->offset == SIZE_MAX)
		return NULL;

restart:
	r = READ_ONCE(radix->root);
	if (!r)
		return NULL;

	n = genradix_root_to_node(r);
	level = genradix_root_to_depth(r);

	if (ilog2(iter->offset) >= genradix_depth_shift(level))
		return NULL;

	while (level) {
		level--;

		i = (iter->offset >> genradix_depth_shift(level)) &
			(GENRADIX_ARY - 1);

		while (!n->children[i]) {
			size_t objs_per_ptr = genradix_depth_size(level);

			if (iter->offset + objs_per_ptr < iter->offset) {
				iter->offset = SIZE_MAX;
				iter->pos = SIZE_MAX;
				return NULL;
			}

			i++;
			iter->offset = round_down(iter->offset + objs_per_ptr,
						  objs_per_ptr);
			iter->pos = (iter->offset >> GENRADIX_NODE_SHIFT) *
				objs_per_page;
			if (i == GENRADIX_ARY)
				goto restart;
		}

		n = n->children[i];
	}

	return &n->data[iter->offset & (GENRADIX_NODE_SIZE - 1)];
}
EXPORT_SYMBOL(__genradix_iter_peek);

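/*
 * Like __genradix_iter_peek(), but walks backwards: returns a pointer to the
 * last allocated object at or before the iterator's current position, or NULL
 * if there are none.  @obj_size_plus_page_remainder is the object size plus
 * the unused tail of a node, so subtracting it from the start of one node
 * lands on the last object slot of the node before it.
 */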
void *__genradix_iter_peek_prev(struct genradix_iter *iter,
				struct __genradix *radix,
				size_t objs_per_page,
				size_t obj_size_plus_page_remainder)
{
	struct genradix_root *r;
	struct genradix_node *n;
	unsigned level, i;

	if (iter->offset == SIZE_MAX)
		return NULL;

restart:
	r = READ_ONCE(radix->root);
	if (!r)
		return NULL;

	n = genradix_root_to_node(r);
	level = genradix_root_to_depth(r);

	if (ilog2(iter->offset) >= genradix_depth_shift(level)) {
		iter->offset = genradix_depth_size(level);
		iter->pos = (iter->offset >> GENRADIX_NODE_SHIFT) * objs_per_page;

		iter->offset -= obj_size_plus_page_remainder;
		iter->pos--;
	}

	while (level) {
		level--;

		i = (iter->offset >> genradix_depth_shift(level)) &
			(GENRADIX_ARY - 1);

		while (!n->children[i]) {
			size_t objs_per_ptr = genradix_depth_size(level);

			iter->offset = round_down(iter->offset, objs_per_ptr);
			iter->pos = (iter->offset >> GENRADIX_NODE_SHIFT) * objs_per_page;

			if (!iter->offset)
				return NULL;

			iter->offset -= obj_size_plus_page_remainder;
			iter->pos--;

			if (!i)
				goto restart;
			--i;
		}

		n = n->children[i];
	}

	return &n->data[iter->offset & (GENRADIX_NODE_SIZE - 1)];
}
EXPORT_SYMBOL(__genradix_iter_peek_prev);

static void genradix_free_recurse(struct genradix_node *n, unsigned level)
{
	if (level) {
		unsigned i;

		for (i = 0; i < GENRADIX_ARY; i++)
			if (n->children[i])
				genradix_free_recurse(n->children[i], level - 1);
	}

	genradix_free_node(n);
}

int __genradix_prealloc(struct __genradix *radix, size_t size,
			gfp_t gfp_mask)
{
	size_t offset;

	for (offset = 0; offset < size; offset += GENRADIX_NODE_SIZE)
		if (!__genradix_ptr_alloc(radix, offset, gfp_mask))
			return -ENOMEM;

	return 0;
}
EXPORT_SYMBOL(__genradix_prealloc);

void __genradix_free(struct __genradix *radix)
{
	struct genradix_root *r = xchg(&radix->root, NULL);

	genradix_free_recurse(genradix_root_to_node(r),
			      genradix_root_to_depth(r));
}
EXPORT_SYMBOL(__genradix_free);
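
/*
 * Illustrative usage sketch (not part of this file): callers normally go
 * through the typed wrappers in include/linux/generic-radix-tree.h rather
 * than the __genradix_*() functions above.  Assuming those wrappers
 * (GENRADIX(), genradix_init(), genradix_ptr_alloc(), genradix_ptr(),
 * genradix_free()), a sparse array of struct foo might look like:
 *
 *	static GENRADIX(struct foo) foos;
 *
 *	genradix_init(&foos);
 *
 *	// Allocates intermediate nodes as needed; new entries are zeroed:
 *	struct foo *f = genradix_ptr_alloc(&foos, idx, GFP_KERNEL);
 *	if (!f)
 *		return -ENOMEM;
 *	f->bar = 1;
 *
 *	// Lookup without allocating; NULL if idx was never allocated:
 *	f = genradix_ptr(&foos, idx);
 *
 *	genradix_free(&foos);
 */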