1 1.34 chs /* $NetBSD: radixtree.c,v 1.34 2024/05/04 17:58:24 chs Exp $ */
2 1.1 yamt
3 1.1 yamt /*-
4 1.18 ad * Copyright (c)2011,2012,2013 YAMAMOTO Takashi,
5 1.1 yamt * All rights reserved.
6 1.1 yamt *
7 1.1 yamt * Redistribution and use in source and binary forms, with or without
8 1.1 yamt * modification, are permitted provided that the following conditions
9 1.1 yamt * are met:
10 1.1 yamt * 1. Redistributions of source code must retain the above copyright
11 1.1 yamt * notice, this list of conditions and the following disclaimer.
12 1.1 yamt * 2. Redistributions in binary form must reproduce the above copyright
13 1.1 yamt * notice, this list of conditions and the following disclaimer in the
14 1.1 yamt * documentation and/or other materials provided with the distribution.
15 1.1 yamt *
16 1.1 yamt * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
17 1.1 yamt * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18 1.1 yamt * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19 1.1 yamt * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
20 1.1 yamt * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 1.1 yamt * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
22 1.1 yamt * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
23 1.1 yamt * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
24 1.1 yamt * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
25 1.1 yamt * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26 1.1 yamt * SUCH DAMAGE.
27 1.1 yamt */
28 1.1 yamt
29 1.1 yamt /*
30 1.17 yamt * radixtree.c
31 1.1 yamt *
32 1.18 ad * Overview:
33 1.18 ad *
34 1.18 ad * This is an implementation of a radix tree, whose keys are uint64_t and
35 1.17 yamt * whose leaves are user-provided pointers.
36 1.17 yamt *
37 1.18 ad * Leaf nodes are just void * and this implementation doesn't care about
38 1.18 ad * what they actually point to. However, this implementation has an assumption
39 1.18 ad * about their alignment. Specifically, this implementation assumes that their
40 1.18 ad * 2 LSBs are always zero and uses them for internal accounting.
41 1.18 ad *
42 1.18 ad * Intermediate nodes and memory allocation:
43 1.18 ad *
44 1.18 ad * Intermediate nodes are automatically allocated and freed internally and
45 1.18 ad * basically users don't need to care about them. The allocation is done via
46 1.32 ad * kmem_zalloc(9) for _KERNEL, malloc(3) for userland, and alloc() for
47 1.18 ad * the _STANDALONE environment. Only the radix_tree_insert_node function can
48 1.18 ad * allocate memory for intermediate nodes and thus can fail with ENOMEM.
49 1.18 ad *
50 1.18 ad * Memory Efficiency:
51 1.18 ad *
52 1.18 ad * It's designed to work efficiently with a dense index distribution.
53 1.18 ad * The memory consumption (number of necessary intermediate nodes) heavily
54 1.18 ad * depends on the index distribution. Basically, a denser index distribution
55 1.18 ad * consumes fewer nodes per item. Approximately,
56 1.18 ad *
57 1.18 ad * - the best case: about RADIX_TREE_PTR_PER_NODE items per intermediate node.
58 1.18 ad * it would look like the following.
59 1.18 ad *
60 1.18 ad * root (t_height=1)
61 1.18 ad * |
62 1.18 ad * v
63 1.18 ad * [ | | | ] (intermediate node. RADIX_TREE_PTR_PER_NODE=4 in this fig)
64 1.18 ad * | | | |
65 1.18 ad * v v v v
66 1.18 ad * p p p p (items)
67 1.18 ad *
68 1.18 ad * - the worst case: RADIX_TREE_MAX_HEIGHT intermediate nodes per item.
69 1.18 ad * it would look like the following if RADIX_TREE_MAX_HEIGHT=3.
70 1.18 ad *
71 1.18 ad * root (t_height=3)
72 1.18 ad * |
73 1.18 ad * v
74 1.18 ad * [ | | | ]
75 1.18 ad * |
76 1.18 ad * v
77 1.18 ad * [ | | | ]
78 1.18 ad * |
79 1.18 ad * v
80 1.18 ad * [ | | | ]
81 1.18 ad * |
82 1.18 ad * v
83 1.18 ad * p
84 1.18 ad *
85 1.18 ad * The height of tree (t_height) is dynamic. It's smaller if only small
86 1.18 ad * index values are used. As an extreme case, if only index 0 is used,
87 1.18 ad * the corresponding value is directly stored in the root of the tree
88 1.18 ad * (struct radix_tree) without allocating any intermediate nodes. In that
89 1.18 ad * case, t_height=0.
90 1.18 ad *
91 1.18 ad * Gang lookup:
92 1.17 yamt *
93 1.18 ad * This implementation provides a way to scan many nodes quickly via
94 1.17 yamt * the radix_tree_gang_lookup_node function and its variants.
95 1.17 yamt *
96 1.18 ad * Tags:
97 1.18 ad *
98 1.18 ad * This implementation provides tagging functionality, which allows quick
99 1.18 ad * scanning of a subset of leaf nodes. Leaf nodes are untagged when inserted
100 1.18 ad * into the tree and can be tagged by the radix_tree_set_tag function. The
101 1.18 ad * radix_tree_gang_lookup_tagged_node function and its variants return only
102 1.18 ad * leaf nodes with the given tag. To reduce the number of nodes to visit for
103 1.17 yamt * these functions, this implementation keeps tagging information in internal
104 1.17 yamt * intermediate nodes and quickly skips uninteresting parts of a tree.
105 1.18 ad *
106 1.18 ad * A tree has RADIX_TREE_TAG_ID_MAX independent tag spaces, each of which is
107 1.29 andvar * identified by a zero-origin number, tagid. In the current implementation,
108 1.18 ad * RADIX_TREE_TAG_ID_MAX is 2. A set of tags is described as a bitmask tagmask,
109 1.18 ad * which is a bitwise OR of (1 << tagid).
110 1.1 yamt */
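
/*
 * Example (editor's illustrative sketch, never compiled): typical use of the
 * public interface declared in <sys/radixtree.h>. The example_item structure
 * and example_usage function are hypothetical; only the radix_tree_* calls
 * are real. Error handling is kept to the minimum and the stored pointers
 * are assumed to be at least 4-byte aligned, as required by the 2-LSB tag
 * packing described above.
 */
#if 0 /* illustrative sketch only */
struct example_item {
	uint64_t ei_key;	/* caller data; &example_item is suitably aligned */
};

static int
example_usage(struct radix_tree *t, struct example_item *item)
{
	void *results[4];
	unsigned int n __diagused;
	int error;

	radix_tree_init_tree(t);

	/* insertion is the only operation that can fail (with ENOMEM) */
	error = radix_tree_insert_node(t, item->ei_key, item);
	if (error != 0) {
		return error;
	}

	/* point lookup */
	KASSERT(radix_tree_lookup_node(t, item->ei_key) == item);

	/* tag the leaf with tag id 0, i.e. tagmask (1 << 0) */
	radix_tree_set_tag(t, item->ei_key, 1 << 0);

	/* scan for tagged leaves in ascending index order, starting at 0 */
	n = radix_tree_gang_lookup_tagged_node(t, 0, results,
	    __arraycount(results), false, 1 << 0);
	KASSERT(n == 1 && results[0] == item);

	/* the tree must be empty before radix_tree_fini_tree */
	radix_tree_remove_node(t, item->ei_key);
	radix_tree_fini_tree(t);
	return 0;
}
#endif /* illustrative sketch only */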
111 1.1 yamt
112 1.1 yamt #include <sys/cdefs.h>
113 1.1 yamt
114 1.2 yamt #if defined(_KERNEL) || defined(_STANDALONE)
115 1.34 chs __KERNEL_RCSID(0, "$NetBSD: radixtree.c,v 1.34 2024/05/04 17:58:24 chs Exp $");
116 1.1 yamt #include <sys/param.h>
117 1.3 yamt #include <sys/errno.h>
118 1.32 ad #include <sys/kmem.h>
119 1.1 yamt #include <sys/radixtree.h>
120 1.3 yamt #include <lib/libkern/libkern.h>
121 1.3 yamt #if defined(_STANDALONE)
122 1.3 yamt #include <lib/libsa/stand.h>
123 1.3 yamt #endif /* defined(_STANDALONE) */
124 1.2 yamt #else /* defined(_KERNEL) || defined(_STANDALONE) */
125 1.34 chs __RCSID("$NetBSD: radixtree.c,v 1.34 2024/05/04 17:58:24 chs Exp $");
126 1.1 yamt #include <assert.h>
127 1.1 yamt #include <errno.h>
128 1.1 yamt #include <stdbool.h>
129 1.1 yamt #include <stdlib.h>
130 1.8 yamt #include <string.h>
131 1.1 yamt #if 1
132 1.1 yamt #define KASSERT assert
133 1.1 yamt #else
134 1.1 yamt #define KASSERT(a) /* nothing */
135 1.1 yamt #endif
136 1.2 yamt #endif /* defined(_KERNEL) || defined(_STANDALONE) */
137 1.1 yamt
138 1.1 yamt #include <sys/radixtree.h>
139 1.1 yamt
140 1.1 yamt #define RADIX_TREE_BITS_PER_HEIGHT 4 /* XXX tune */
141 1.1 yamt #define RADIX_TREE_PTR_PER_NODE (1 << RADIX_TREE_BITS_PER_HEIGHT)
142 1.1 yamt #define RADIX_TREE_MAX_HEIGHT (64 / RADIX_TREE_BITS_PER_HEIGHT)
143 1.15 yamt #define RADIX_TREE_INVALID_HEIGHT (RADIX_TREE_MAX_HEIGHT + 1)
144 1.2 yamt __CTASSERT((64 % RADIX_TREE_BITS_PER_HEIGHT) == 0);
145 1.1 yamt
146 1.2 yamt __CTASSERT(((1 << RADIX_TREE_TAG_ID_MAX) & (sizeof(int) - 1)) == 0);
147 1.1 yamt #define RADIX_TREE_TAG_MASK ((1 << RADIX_TREE_TAG_ID_MAX) - 1)
148 1.1 yamt
149 1.1 yamt static inline void *
150 1.1 yamt entry_ptr(void *p)
151 1.1 yamt {
152 1.1 yamt
153 1.1 yamt return (void *)((uintptr_t)p & ~RADIX_TREE_TAG_MASK);
154 1.1 yamt }
155 1.1 yamt
156 1.1 yamt static inline unsigned int
157 1.1 yamt entry_tagmask(void *p)
158 1.1 yamt {
159 1.1 yamt
160 1.1 yamt return (uintptr_t)p & RADIX_TREE_TAG_MASK;
161 1.1 yamt }
162 1.1 yamt
163 1.1 yamt static inline void *
164 1.1 yamt entry_compose(void *p, unsigned int tagmask)
165 1.1 yamt {
166 1.1 yamt
167 1.1 yamt return (void *)((uintptr_t)p | tagmask);
168 1.1 yamt }
169 1.1 yamt
170 1.1 yamt static inline bool
171 1.1 yamt entry_match_p(void *p, unsigned int tagmask)
172 1.1 yamt {
173 1.1 yamt
174 1.1 yamt KASSERT(entry_ptr(p) != NULL || entry_tagmask(p) == 0);
175 1.1 yamt if (p == NULL) {
176 1.1 yamt return false;
177 1.1 yamt }
178 1.1 yamt if (tagmask == 0) {
179 1.1 yamt return true;
180 1.1 yamt }
181 1.1 yamt return (entry_tagmask(p) & tagmask) != 0;
182 1.1 yamt }
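
/*
 * Example (editor's illustrative sketch, never compiled): how the helpers
 * above pack a tag set into the 2 LSBs of an aligned pointer. The
 * example_entry_packing function is hypothetical; p must be non-NULL and at
 * least 4-byte aligned so that its 2 LSBs are zero.
 */
#if 0 /* illustrative sketch only */
static void
example_entry_packing(void *p)
{
	void *e = entry_compose(p, 0x2);	/* set tag id 1 only */

	KASSERT(entry_ptr(e) == p);		/* the pointer is recoverable */
	KASSERT(entry_tagmask(e) == 0x2);	/* so are the tag bits */
	KASSERT(entry_match_p(e, 0x2));		/* tag id 1 is set */
	KASSERT(!entry_match_p(e, 0x1));	/* tag id 0 is not */
}
#endif /* illustrative sketch only */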
183 1.1 yamt
184 1.1 yamt /*
185 1.1 yamt * radix_tree_node: an intermediate node
186 1.1 yamt *
187 1.1 yamt * we don't care about the type of leaf nodes. they are just void *.
188 1.19 ad *
189 1.19 ad * we used to maintain a count of non-NULL nodes in this structure, but it
190 1.19 ad * prevented the structure from being aligned to a cache line boundary; the
191 1.19 ad * performance benefit from being cache-friendly is greater than that of having
192 1.19 ad * a dedicated count value, especially in multi-processor situations where
193 1.19 ad * we need to avoid intra-pool-page false sharing.
194 1.1 yamt */
195 1.1 yamt
196 1.1 yamt struct radix_tree_node {
197 1.1 yamt void *n_ptrs[RADIX_TREE_PTR_PER_NODE];
198 1.1 yamt };
199 1.1 yamt
200 1.7 yamt /*
201 1.1 yamt * p_refs[0].pptr == &t->t_root
202 1.1 yamt * :
203 1.1 yamt * p_refs[n].pptr == &(*p_refs[n-1])->n_ptrs[x]
204 1.1 yamt * :
205 1.1 yamt * :
206 1.1 yamt * p_refs[t->t_height].pptr == &leaf_pointer
207 1.1 yamt */
208 1.1 yamt
209 1.1 yamt struct radix_tree_path {
210 1.1 yamt struct radix_tree_node_ref {
211 1.1 yamt void **pptr;
212 1.1 yamt } p_refs[RADIX_TREE_MAX_HEIGHT + 1]; /* +1 for the root ptr */
213 1.15 yamt /*
214 1.15 yamt * p_lastidx is either the index of the last valid element of p_refs[]
215 1.15 yamt * or RADIX_TREE_INVALID_HEIGHT.
216 1.15 yamt * RADIX_TREE_INVALID_HEIGHT means that radix_tree_lookup_ptr found
217 1.15 yamt * that the height of the tree is not enough to cover the given index.
218 1.15 yamt */
219 1.10 yamt unsigned int p_lastidx;
220 1.1 yamt };
221 1.1 yamt
222 1.1 yamt static inline void **
223 1.13 yamt path_pptr(const struct radix_tree *t, const struct radix_tree_path *p,
224 1.1 yamt unsigned int height)
225 1.1 yamt {
226 1.1 yamt
227 1.1 yamt KASSERT(height <= t->t_height);
228 1.1 yamt return p->p_refs[height].pptr;
229 1.1 yamt }
230 1.1 yamt
231 1.1 yamt static inline struct radix_tree_node *
232 1.13 yamt path_node(const struct radix_tree * t, const struct radix_tree_path *p,
233 1.13 yamt unsigned int height)
234 1.1 yamt {
235 1.1 yamt
236 1.1 yamt KASSERT(height <= t->t_height);
237 1.1 yamt return entry_ptr(*path_pptr(t, p, height));
238 1.1 yamt }
239 1.1 yamt
240 1.1 yamt /*
241 1.1 yamt * radix_tree_init_tree:
242 1.1 yamt *
243 1.18 ad * Initialize a tree.
244 1.1 yamt */
245 1.1 yamt
246 1.1 yamt void
247 1.1 yamt radix_tree_init_tree(struct radix_tree *t)
248 1.1 yamt {
249 1.1 yamt
250 1.1 yamt t->t_height = 0;
251 1.1 yamt t->t_root = NULL;
252 1.1 yamt }
253 1.1 yamt
254 1.1 yamt /*
255 1.18 ad * radix_tree_fini_tree:
256 1.1 yamt *
257 1.18 ad * Finish using a tree.
258 1.1 yamt */
259 1.1 yamt
260 1.1 yamt void
261 1.1 yamt radix_tree_fini_tree(struct radix_tree *t)
262 1.1 yamt {
263 1.1 yamt
264 1.1 yamt KASSERT(t->t_root == NULL);
265 1.1 yamt KASSERT(t->t_height == 0);
266 1.1 yamt }
267 1.1 yamt
268 1.18 ad /*
269 1.18 ad * radix_tree_empty_tree_p:
270 1.18 ad *
271 1.18 ad * Return true if the tree is empty, otherwise false.
272 1.18 ad */
273 1.18 ad
274 1.9 yamt bool
275 1.9 yamt radix_tree_empty_tree_p(struct radix_tree *t)
276 1.9 yamt {
277 1.9 yamt
278 1.9 yamt return t->t_root == NULL;
279 1.9 yamt }
280 1.9 yamt
281 1.18 ad /*
282 1.18 ad * radix_tree_empty_tagged_tree_p:
283 1.18 ad *
284 1.18 ad * Return true if the tree has no nodes with the given tag. Otherwise
285 1.18 ad * return false.
286 1.18 ad *
287 1.18 ad * It's illegal to call this function with tagmask 0.
288 1.18 ad */
289 1.18 ad
290 1.16 yamt bool
291 1.18 ad radix_tree_empty_tagged_tree_p(struct radix_tree *t, unsigned int tagmask)
292 1.16 yamt {
293 1.16 yamt
294 1.18 ad KASSERT(tagmask != 0);
295 1.16 yamt return (entry_tagmask(t->t_root) & tagmask) == 0;
296 1.16 yamt }
297 1.16 yamt
298 1.3 yamt static void
299 1.3 yamt radix_tree_node_init(struct radix_tree_node *n)
300 1.3 yamt {
301 1.3 yamt
302 1.3 yamt memset(n, 0, sizeof(*n));
303 1.3 yamt }
304 1.3 yamt
305 1.1 yamt #if defined(_KERNEL)
306 1.1 yamt /*
307 1.1 yamt * radix_tree_init:
308 1.1 yamt *
309 1.1 yamt * initialize the subsystem.
310 1.1 yamt */
311 1.1 yamt
312 1.1 yamt void
313 1.1 yamt radix_tree_init(void)
314 1.1 yamt {
315 1.1 yamt
316 1.32 ad /* nothing right now */
317 1.1 yamt }
318 1.22 ad
319 1.22 ad /*
320 1.22 ad * radix_tree_await_memory:
321 1.22 ad *
322 1.22 ad * after an insert has failed with ENOMEM, wait for memory to become
323 1.25 ad * available, so the caller can retry. this needs to ensure that the
324 1.25 ad * maximum possible required number of nodes is available.
325 1.22 ad */
326 1.22 ad
327 1.22 ad void
328 1.22 ad radix_tree_await_memory(void)
329 1.22 ad {
330 1.25 ad struct radix_tree_node *nodes[RADIX_TREE_MAX_HEIGHT];
331 1.25 ad int i;
332 1.22 ad
333 1.25 ad for (i = 0; i < __arraycount(nodes); i++) {
334 1.32 ad nodes[i] = kmem_intr_alloc(sizeof(struct radix_tree_node),
335 1.32 ad KM_SLEEP);
336 1.25 ad }
337 1.25 ad while (--i >= 0) {
338 1.33 ad kmem_intr_free(nodes[i], sizeof(struct radix_tree_node));
339 1.25 ad }
340 1.22 ad }
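
/*
 * Example (editor's illustrative sketch, never compiled): the retry pattern
 * that radix_tree_await_memory exists to support. example_insert_retry is
 * hypothetical; in real use the caller would typically drop its own locks
 * before waiting.
 */
#if 0 /* illustrative sketch only */
static void
example_insert_retry(struct radix_tree *t, uint64_t idx, void *p)
{
	int error;

	while ((error = radix_tree_insert_node(t, idx, p)) != 0) {
		KASSERT(error == ENOMEM);
		/* sleep until enough nodes can be allocated, then retry */
		radix_tree_await_memory();
	}
}
#endif /* illustrative sketch only */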
341 1.22 ad
342 1.1 yamt #endif /* defined(_KERNEL) */
343 1.1 yamt
344 1.24 ad /*
345 1.26 ad * radix_tree_sum_node:
346 1.24 ad *
347 1.24 ad * return the logical sum of all entries in the given node. used to quickly
348 1.24 ad * check for tag masks or empty nodes.
349 1.24 ad */
350 1.24 ad
351 1.24 ad static uintptr_t
352 1.26 ad radix_tree_sum_node(const struct radix_tree_node *n)
353 1.1 yamt {
354 1.19 ad #if RADIX_TREE_PTR_PER_NODE > 16
355 1.25 ad unsigned int i;
356 1.24 ad uintptr_t sum;
357 1.1 yamt
358 1.24 ad for (i = 0, sum = 0; i < RADIX_TREE_PTR_PER_NODE; i++) {
359 1.24 ad sum |= (uintptr_t)n->n_ptrs[i];
360 1.1 yamt }
361 1.24 ad return sum;
362 1.19 ad #else /* RADIX_TREE_PTR_PER_NODE > 16 */
363 1.19 ad uintptr_t sum;
364 1.19 ad
365 1.19 ad /*
366 1.19 ad * Unrolling the above is much better than a tight loop with two
367 1.19 ad * test+branch pairs. On x86 with gcc 5.5.0 this compiles into 19
368 1.19 ad * deterministic instructions including the "return" and prologue &
369 1.19 ad * epilogue.
370 1.19 ad */
371 1.19 ad sum = (uintptr_t)n->n_ptrs[0];
372 1.19 ad sum |= (uintptr_t)n->n_ptrs[1];
373 1.19 ad sum |= (uintptr_t)n->n_ptrs[2];
374 1.19 ad sum |= (uintptr_t)n->n_ptrs[3];
375 1.19 ad #if RADIX_TREE_PTR_PER_NODE > 4
376 1.19 ad sum |= (uintptr_t)n->n_ptrs[4];
377 1.19 ad sum |= (uintptr_t)n->n_ptrs[5];
378 1.19 ad sum |= (uintptr_t)n->n_ptrs[6];
379 1.19 ad sum |= (uintptr_t)n->n_ptrs[7];
380 1.19 ad #endif
381 1.19 ad #if RADIX_TREE_PTR_PER_NODE > 8
382 1.19 ad sum |= (uintptr_t)n->n_ptrs[8];
383 1.19 ad sum |= (uintptr_t)n->n_ptrs[9];
384 1.19 ad sum |= (uintptr_t)n->n_ptrs[10];
385 1.19 ad sum |= (uintptr_t)n->n_ptrs[11];
386 1.19 ad sum |= (uintptr_t)n->n_ptrs[12];
387 1.19 ad sum |= (uintptr_t)n->n_ptrs[13];
388 1.19 ad sum |= (uintptr_t)n->n_ptrs[14];
389 1.19 ad sum |= (uintptr_t)n->n_ptrs[15];
390 1.19 ad #endif
391 1.24 ad return sum;
392 1.19 ad #endif /* RADIX_TREE_PTR_PER_NODE > 16 */
393 1.19 ad }
394 1.19 ad
395 1.19 ad static int __unused
396 1.19 ad radix_tree_node_count_ptrs(const struct radix_tree_node *n)
397 1.19 ad {
398 1.19 ad unsigned int i, c;
399 1.19 ad
400 1.19 ad for (i = c = 0; i < RADIX_TREE_PTR_PER_NODE; i++) {
401 1.19 ad c += (n->n_ptrs[i] != NULL);
402 1.19 ad }
403 1.19 ad return c;
404 1.1 yamt }
405 1.1 yamt
406 1.1 yamt static struct radix_tree_node *
407 1.1 yamt radix_tree_alloc_node(void)
408 1.1 yamt {
409 1.1 yamt struct radix_tree_node *n;
410 1.1 yamt
411 1.1 yamt #if defined(_KERNEL)
412 1.18 ad /*
413 1.34 chs * We must not block waiting for memory because this function
414 1.34 chs * can be called in contexts where waiting for memory is illegal.
415 1.18 ad */
416 1.34 chs n = kmem_intr_alloc(sizeof(struct radix_tree_node), KM_NOSLEEP);
417 1.32 ad #elif defined(_STANDALONE)
418 1.3 yamt n = alloc(sizeof(*n));
419 1.3 yamt #else /* defined(_STANDALONE) */
420 1.3 yamt n = malloc(sizeof(*n));
421 1.3 yamt #endif /* defined(_STANDALONE) */
422 1.3 yamt if (n != NULL) {
423 1.3 yamt radix_tree_node_init(n);
424 1.3 yamt }
425 1.26 ad KASSERT(n == NULL || radix_tree_sum_node(n) == 0);
426 1.1 yamt return n;
427 1.1 yamt }
428 1.1 yamt
429 1.1 yamt static void
430 1.1 yamt radix_tree_free_node(struct radix_tree_node *n)
431 1.1 yamt {
432 1.1 yamt
433 1.26 ad KASSERT(radix_tree_sum_node(n) == 0);
434 1.1 yamt #if defined(_KERNEL)
435 1.32 ad kmem_intr_free(n, sizeof(struct radix_tree_node));
436 1.3 yamt #elif defined(_STANDALONE)
437 1.3 yamt dealloc(n, sizeof(*n));
438 1.3 yamt #else
439 1.1 yamt free(n);
440 1.3 yamt #endif
441 1.1 yamt }
442 1.1 yamt
443 1.25 ad /*
444 1.25 ad * radix_tree_grow:
445 1.25 ad *
446 1.25 ad * increase the height of the tree.
447 1.25 ad */
448 1.25 ad
449 1.25 ad static __noinline int
450 1.1 yamt radix_tree_grow(struct radix_tree *t, unsigned int newheight)
451 1.1 yamt {
452 1.1 yamt const unsigned int tagmask = entry_tagmask(t->t_root);
453 1.25 ad struct radix_tree_node *newnodes[RADIX_TREE_MAX_HEIGHT];
454 1.25 ad void *root;
455 1.25 ad int h;
456 1.1 yamt
457 1.25 ad KASSERT(newheight <= RADIX_TREE_MAX_HEIGHT);
458 1.25 ad if ((root = t->t_root) == NULL) {
459 1.1 yamt t->t_height = newheight;
460 1.1 yamt return 0;
461 1.1 yamt }
462 1.25 ad for (h = t->t_height; h < newheight; h++) {
463 1.25 ad newnodes[h] = radix_tree_alloc_node();
464 1.25 ad if (__predict_false(newnodes[h] == NULL)) {
465 1.25 ad while (--h >= (int)t->t_height) {
466 1.25 ad newnodes[h]->n_ptrs[0] = NULL;
467 1.25 ad radix_tree_free_node(newnodes[h]);
468 1.25 ad }
469 1.1 yamt return ENOMEM;
470 1.1 yamt }
471 1.25 ad newnodes[h]->n_ptrs[0] = root;
472 1.25 ad root = entry_compose(newnodes[h], tagmask);
473 1.1 yamt }
474 1.25 ad t->t_root = root;
475 1.25 ad t->t_height = h;
476 1.1 yamt return 0;
477 1.1 yamt }
478 1.1 yamt
479 1.5 yamt /*
480 1.5 yamt * radix_tree_lookup_ptr:
481 1.5 yamt *
482 1.5 yamt * an internal helper function used for various exported functions.
483 1.5 yamt *
484 1.5 yamt * return the pointer to store the node for the given index.
485 1.5 yamt *
486 1.5 yamt * if alloc is true, try to allocate the storage. (note for _KERNEL:
487 1.5 yamt * the allocation is done without sleeping, so this function does not block.)
488 1.5 yamt * if the allocation failed or alloc is false, return NULL.
489 1.5 yamt *
490 1.5 yamt * if path is not NULL, fill it for the caller's investigation.
491 1.5 yamt *
492 1.5 yamt * if tagmask is not zero, search only for nodes with the tag set.
493 1.15 yamt * note that, however, this function doesn't check the tagmask for the leaf
494 1.15 yamt * pointer. it's the caller's responsibility to check the value pointed to
495 1.15 yamt * by the returned pointer if necessary.
496 1.5 yamt *
497 1.5 yamt * while this function is a bit large, it's called with some constant
498 1.5 yamt * arguments, so inlining might have benefits. anyway, the compiler will decide.
499 1.5 yamt */
500 1.5 yamt
501 1.1 yamt static inline void **
502 1.1 yamt radix_tree_lookup_ptr(struct radix_tree *t, uint64_t idx,
503 1.1 yamt struct radix_tree_path *path, bool alloc, const unsigned int tagmask)
504 1.1 yamt {
505 1.1 yamt struct radix_tree_node *n;
506 1.1 yamt int hshift = RADIX_TREE_BITS_PER_HEIGHT * t->t_height;
507 1.1 yamt int shift;
508 1.1 yamt void **vpp;
509 1.1 yamt const uint64_t mask = (UINT64_C(1) << RADIX_TREE_BITS_PER_HEIGHT) - 1;
510 1.1 yamt struct radix_tree_node_ref *refs = NULL;
511 1.1 yamt
512 1.5 yamt /*
513 1.5 yamt * check unsupported combinations
514 1.5 yamt */
515 1.1 yamt KASSERT(tagmask == 0 || !alloc);
516 1.1 yamt KASSERT(path == NULL || !alloc);
517 1.1 yamt vpp = &t->t_root;
518 1.1 yamt if (path != NULL) {
519 1.1 yamt refs = path->p_refs;
520 1.1 yamt refs->pptr = vpp;
521 1.1 yamt }
522 1.1 yamt n = NULL;
523 1.1 yamt for (shift = 64 - RADIX_TREE_BITS_PER_HEIGHT; shift >= 0;) {
524 1.1 yamt struct radix_tree_node *c;
525 1.1 yamt void *entry;
526 1.1 yamt const uint64_t i = (idx >> shift) & mask;
527 1.1 yamt
528 1.1 yamt if (shift >= hshift) {
529 1.1 yamt unsigned int newheight;
530 1.1 yamt
531 1.1 yamt KASSERT(vpp == &t->t_root);
532 1.1 yamt if (i == 0) {
533 1.1 yamt shift -= RADIX_TREE_BITS_PER_HEIGHT;
534 1.1 yamt continue;
535 1.1 yamt }
536 1.1 yamt if (!alloc) {
537 1.1 yamt if (path != NULL) {
538 1.1 yamt KASSERT((refs - path->p_refs) == 0);
539 1.15 yamt path->p_lastidx =
540 1.15 yamt RADIX_TREE_INVALID_HEIGHT;
541 1.1 yamt }
542 1.1 yamt return NULL;
543 1.1 yamt }
544 1.1 yamt newheight = shift / RADIX_TREE_BITS_PER_HEIGHT + 1;
545 1.1 yamt if (radix_tree_grow(t, newheight)) {
546 1.1 yamt return NULL;
547 1.1 yamt }
548 1.1 yamt hshift = RADIX_TREE_BITS_PER_HEIGHT * t->t_height;
549 1.1 yamt }
550 1.1 yamt entry = *vpp;
551 1.1 yamt c = entry_ptr(entry);
552 1.1 yamt if (c == NULL ||
553 1.1 yamt (tagmask != 0 &&
554 1.1 yamt (entry_tagmask(entry) & tagmask) == 0)) {
555 1.1 yamt if (!alloc) {
556 1.1 yamt if (path != NULL) {
557 1.1 yamt path->p_lastidx = refs - path->p_refs;
558 1.1 yamt }
559 1.1 yamt return NULL;
560 1.1 yamt }
561 1.1 yamt c = radix_tree_alloc_node();
562 1.1 yamt if (c == NULL) {
563 1.1 yamt return NULL;
564 1.1 yamt }
565 1.1 yamt *vpp = c;
566 1.1 yamt }
567 1.1 yamt n = c;
568 1.1 yamt vpp = &n->n_ptrs[i];
569 1.1 yamt if (path != NULL) {
570 1.1 yamt refs++;
571 1.1 yamt refs->pptr = vpp;
572 1.1 yamt }
573 1.1 yamt shift -= RADIX_TREE_BITS_PER_HEIGHT;
574 1.1 yamt }
575 1.1 yamt if (alloc) {
576 1.1 yamt KASSERT(*vpp == NULL);
577 1.1 yamt }
578 1.1 yamt if (path != NULL) {
579 1.1 yamt path->p_lastidx = refs - path->p_refs;
580 1.1 yamt }
581 1.1 yamt return vpp;
582 1.1 yamt }
583 1.1 yamt
584 1.1 yamt /*
585 1.25 ad * radix_tree_undo_insert_node:
586 1.25 ad *
587 1.25 ad * Undo the effects of a failed insert. The conditions that led to the
588 1.25 ad * insert may change and it may not be retried. If the insert is not
589 1.25 ad * retried, there will be no corresponding radix_tree_remove_node() for
590 1.25 ad * this index in the future. Therefore any adjustments made to the tree
591 1.25 ad * before memory was exhausted must be reverted.
592 1.25 ad */
593 1.25 ad
594 1.25 ad static __noinline void
595 1.25 ad radix_tree_undo_insert_node(struct radix_tree *t, uint64_t idx)
596 1.25 ad {
597 1.25 ad struct radix_tree_path path;
598 1.25 ad int i;
599 1.25 ad
600 1.25 ad (void)radix_tree_lookup_ptr(t, idx, &path, false, 0);
601 1.25 ad if (path.p_lastidx == RADIX_TREE_INVALID_HEIGHT) {
602 1.25 ad /*
603 1.25 ad * no nodes were inserted.
604 1.25 ad */
605 1.25 ad return;
606 1.25 ad }
607 1.25 ad for (i = path.p_lastidx - 1; i >= 0; i--) {
608 1.25 ad struct radix_tree_node ** const pptr =
609 1.25 ad (struct radix_tree_node **)path_pptr(t, &path, i);
610 1.25 ad struct radix_tree_node *n;
611 1.25 ad
612 1.25 ad KASSERT(pptr != NULL);
613 1.25 ad n = entry_ptr(*pptr);
614 1.25 ad KASSERT(n != NULL);
615 1.26 ad if (radix_tree_sum_node(n) != 0) {
616 1.25 ad break;
617 1.25 ad }
618 1.25 ad radix_tree_free_node(n);
619 1.25 ad *pptr = NULL;
620 1.25 ad }
621 1.25 ad /*
622 1.25 ad * fix up height
623 1.25 ad */
624 1.25 ad if (i < 0) {
625 1.25 ad KASSERT(t->t_root == NULL);
626 1.25 ad t->t_height = 0;
627 1.25 ad }
628 1.25 ad }
629 1.25 ad
630 1.25 ad /*
631 1.1 yamt * radix_tree_insert_node:
632 1.1 yamt *
633 1.18 ad * Insert the node at the given index.
634 1.18 ad *
635 1.18 ad * It's illegal to insert NULL. It's illegal to insert a non-aligned pointer.
636 1.1 yamt *
637 1.18 ad * This function returns ENOMEM if necessary memory allocation failed.
638 1.18 ad * Otherwise, this function returns 0.
639 1.1 yamt *
640 1.18 ad * Note that inserting a node can involve memory allocation for intermediate
641 1.18 ad * nodes. If _KERNEL, it's done with no-sleep IPL_NONE memory allocation.
642 1.4 yamt *
643 1.18 ad * For the newly inserted node, all tags are cleared.
644 1.1 yamt */
645 1.1 yamt
646 1.1 yamt int
647 1.1 yamt radix_tree_insert_node(struct radix_tree *t, uint64_t idx, void *p)
648 1.1 yamt {
649 1.1 yamt void **vpp;
650 1.1 yamt
651 1.1 yamt KASSERT(p != NULL);
652 1.18 ad KASSERT(entry_tagmask(entry_compose(p, 0)) == 0);
653 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, NULL, true, 0);
654 1.25 ad if (__predict_false(vpp == NULL)) {
655 1.25 ad radix_tree_undo_insert_node(t, idx);
656 1.1 yamt return ENOMEM;
657 1.1 yamt }
658 1.1 yamt KASSERT(*vpp == NULL);
659 1.1 yamt *vpp = p;
660 1.1 yamt return 0;
661 1.1 yamt }
662 1.1 yamt
663 1.4 yamt /*
664 1.4 yamt * radix_tree_replace_node:
665 1.4 yamt *
666 1.18 ad * Replace a node at the given index with the given node and return the
667 1.18 ad * replaced one.
668 1.18 ad *
669 1.18 ad * It's illegal to try to replace a node which has not been inserted.
670 1.4 yamt *
671 1.18 ad * This function keeps tags intact.
672 1.4 yamt */
673 1.4 yamt
674 1.1 yamt void *
675 1.1 yamt radix_tree_replace_node(struct radix_tree *t, uint64_t idx, void *p)
676 1.1 yamt {
677 1.1 yamt void **vpp;
678 1.1 yamt void *oldp;
679 1.1 yamt
680 1.1 yamt KASSERT(p != NULL);
681 1.18 ad KASSERT(entry_tagmask(entry_compose(p, 0)) == 0);
682 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0);
683 1.1 yamt KASSERT(vpp != NULL);
684 1.1 yamt oldp = *vpp;
685 1.1 yamt KASSERT(oldp != NULL);
686 1.1 yamt *vpp = entry_compose(p, entry_tagmask(*vpp));
687 1.1 yamt return entry_ptr(oldp);
688 1.1 yamt }
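
/*
 * Example (editor's illustrative sketch, never compiled): the replacement
 * inherits the tags of the entry it replaces. example_replace, oldp and newp
 * are hypothetical; both pointers must be suitably aligned and non-NULL.
 */
#if 0 /* illustrative sketch only */
static void
example_replace(struct radix_tree *t, uint64_t idx, void *oldp, void *newp)
{
	void *replaced __diagused;

	if (radix_tree_insert_node(t, idx, oldp) != 0) {
		return;					/* ENOMEM */
	}
	radix_tree_set_tag(t, idx, 1 << 0);
	replaced = radix_tree_replace_node(t, idx, newp);
	KASSERT(replaced == oldp);
	KASSERT(radix_tree_get_tag(t, idx, 1 << 0) != 0);	/* tag kept */
	KASSERT(radix_tree_lookup_node(t, idx) == newp);
}
#endif /* illustrative sketch only */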
689 1.1 yamt
690 1.1 yamt /*
691 1.1 yamt * radix_tree_remove_node:
692 1.1 yamt *
693 1.18 ad * Remove the node at the given index.
694 1.18 ad *
695 1.18 ad * It's illegal to try to remove a node which has not been inserted.
696 1.1 yamt */
697 1.1 yamt
698 1.1 yamt void *
699 1.1 yamt radix_tree_remove_node(struct radix_tree *t, uint64_t idx)
700 1.1 yamt {
701 1.1 yamt struct radix_tree_path path;
702 1.1 yamt void **vpp;
703 1.1 yamt void *oldp;
704 1.1 yamt int i;
705 1.1 yamt
706 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0);
707 1.1 yamt KASSERT(vpp != NULL);
708 1.1 yamt oldp = *vpp;
709 1.1 yamt KASSERT(oldp != NULL);
710 1.1 yamt KASSERT(path.p_lastidx == t->t_height);
711 1.1 yamt KASSERT(vpp == path_pptr(t, &path, path.p_lastidx));
712 1.1 yamt *vpp = NULL;
713 1.1 yamt for (i = t->t_height - 1; i >= 0; i--) {
714 1.1 yamt void *entry;
715 1.1 yamt struct radix_tree_node ** const pptr =
716 1.1 yamt (struct radix_tree_node **)path_pptr(t, &path, i);
717 1.1 yamt struct radix_tree_node *n;
718 1.1 yamt
719 1.1 yamt KASSERT(pptr != NULL);
720 1.1 yamt entry = *pptr;
721 1.1 yamt n = entry_ptr(entry);
722 1.1 yamt KASSERT(n != NULL);
723 1.26 ad if (radix_tree_sum_node(n) != 0) {
724 1.1 yamt break;
725 1.1 yamt }
726 1.1 yamt radix_tree_free_node(n);
727 1.1 yamt *pptr = NULL;
728 1.1 yamt }
729 1.1 yamt /*
730 1.1 yamt * fix up height
731 1.1 yamt */
732 1.1 yamt if (i < 0) {
733 1.1 yamt KASSERT(t->t_root == NULL);
734 1.1 yamt t->t_height = 0;
735 1.1 yamt }
736 1.1 yamt /*
737 1.1 yamt * update tags
738 1.1 yamt */
739 1.1 yamt for (; i >= 0; i--) {
740 1.1 yamt void *entry;
741 1.1 yamt struct radix_tree_node ** const pptr =
742 1.1 yamt (struct radix_tree_node **)path_pptr(t, &path, i);
743 1.1 yamt struct radix_tree_node *n;
744 1.1 yamt unsigned int newmask;
745 1.1 yamt
746 1.1 yamt KASSERT(pptr != NULL);
747 1.1 yamt entry = *pptr;
748 1.1 yamt n = entry_ptr(entry);
749 1.1 yamt KASSERT(n != NULL);
750 1.26 ad KASSERT(radix_tree_sum_node(n) != 0);
751 1.26 ad newmask = radix_tree_sum_node(n) & RADIX_TREE_TAG_MASK;
752 1.1 yamt if (newmask == entry_tagmask(entry)) {
753 1.1 yamt break;
754 1.1 yamt }
755 1.1 yamt *pptr = entry_compose(n, newmask);
756 1.1 yamt }
757 1.1 yamt /*
758 1.1 yamt * XXX is it worth trying to reduce the height?
759 1.1 yamt * if we do that, make radix_tree_grow rollback its change as well.
760 1.1 yamt */
761 1.1 yamt return entry_ptr(oldp);
762 1.1 yamt }
763 1.1 yamt
764 1.1 yamt /*
765 1.1 yamt * radix_tree_lookup_node:
766 1.1 yamt *
767 1.18 ad * Returns the node at the given index.
768 1.18 ad * Returns NULL if nothing is found at the given index.
769 1.1 yamt */
770 1.1 yamt
771 1.1 yamt void *
772 1.1 yamt radix_tree_lookup_node(struct radix_tree *t, uint64_t idx)
773 1.1 yamt {
774 1.1 yamt void **vpp;
775 1.1 yamt
776 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0);
777 1.1 yamt if (vpp == NULL) {
778 1.1 yamt return NULL;
779 1.1 yamt }
780 1.1 yamt return entry_ptr(*vpp);
781 1.1 yamt }
782 1.1 yamt
783 1.1 yamt static inline void
784 1.1 yamt gang_lookup_init(struct radix_tree *t, uint64_t idx,
785 1.1 yamt struct radix_tree_path *path, const unsigned int tagmask)
786 1.1 yamt {
787 1.18 ad void **vpp __unused;
788 1.1 yamt
789 1.19 ad vpp = radix_tree_lookup_ptr(t, idx, path, false, tagmask);
790 1.1 yamt KASSERT(vpp == NULL ||
791 1.1 yamt vpp == path_pptr(t, path, path->p_lastidx));
792 1.1 yamt KASSERT(&t->t_root == path_pptr(t, path, 0));
793 1.15 yamt KASSERT(path->p_lastidx == RADIX_TREE_INVALID_HEIGHT ||
794 1.15 yamt path->p_lastidx == t->t_height ||
795 1.15 yamt !entry_match_p(*path_pptr(t, path, path->p_lastidx), tagmask));
796 1.1 yamt }
797 1.1 yamt
798 1.15 yamt /*
799 1.15 yamt * gang_lookup_scan:
800 1.15 yamt *
801 1.15 yamt * a helper routine for radix_tree_gang_lookup_node and its variants.
802 1.15 yamt */
803 1.15 yamt
804 1.1 yamt static inline unsigned int
805 1.15 yamt __attribute__((__always_inline__))
806 1.1 yamt gang_lookup_scan(struct radix_tree *t, struct radix_tree_path *path,
807 1.18 ad void **results, const unsigned int maxresults, const unsigned int tagmask,
808 1.18 ad const bool reverse, const bool dense)
809 1.1 yamt {
810 1.15 yamt
811 1.15 yamt /*
812 1.15 yamt * we keep the path updated only for lastidx-1.
813 1.15 yamt * vpp is what path_pptr(t, path, lastidx) would be.
814 1.15 yamt */
815 1.1 yamt void **vpp;
816 1.10 yamt unsigned int nfound;
817 1.1 yamt unsigned int lastidx;
818 1.15 yamt /*
819 1.15 yamt * set up scan-direction-dependent constants so that we can iterate
820 1.15 yamt * over n_ptrs as follows.
821 1.15 yamt *
822 1.15 yamt * for (i = first; i != guard; i += step)
823 1.15 yamt * visit n->n_ptrs[i];
824 1.15 yamt */
825 1.15 yamt const int step = reverse ? -1 : 1;
826 1.15 yamt const unsigned int first = reverse ? RADIX_TREE_PTR_PER_NODE - 1 : 0;
827 1.15 yamt const unsigned int last = reverse ? 0 : RADIX_TREE_PTR_PER_NODE - 1;
828 1.15 yamt const unsigned int guard = last + step;
829 1.1 yamt
830 1.1 yamt KASSERT(maxresults > 0);
831 1.15 yamt KASSERT(&t->t_root == path_pptr(t, path, 0));
832 1.1 yamt lastidx = path->p_lastidx;
833 1.15 yamt KASSERT(lastidx == RADIX_TREE_INVALID_HEIGHT ||
834 1.15 yamt lastidx == t->t_height ||
835 1.15 yamt !entry_match_p(*path_pptr(t, path, lastidx), tagmask));
836 1.15 yamt nfound = 0;
837 1.15 yamt if (lastidx == RADIX_TREE_INVALID_HEIGHT) {
838 1.18 ad /*
839 1.18 ad * requested idx is beyond the right-most node.
840 1.18 ad */
841 1.18 ad if (reverse && !dense) {
842 1.15 yamt lastidx = 0;
843 1.15 yamt vpp = path_pptr(t, path, lastidx);
844 1.15 yamt goto descend;
845 1.15 yamt }
846 1.1 yamt return 0;
847 1.1 yamt }
848 1.1 yamt vpp = path_pptr(t, path, lastidx);
849 1.1 yamt while (/*CONSTCOND*/true) {
850 1.1 yamt struct radix_tree_node *n;
851 1.10 yamt unsigned int i;
852 1.1 yamt
853 1.1 yamt if (entry_match_p(*vpp, tagmask)) {
854 1.1 yamt KASSERT(lastidx == t->t_height);
855 1.1 yamt /*
856 1.15 yamt * record the matching non-NULL leaf.
857 1.1 yamt */
858 1.1 yamt results[nfound] = entry_ptr(*vpp);
859 1.1 yamt nfound++;
860 1.1 yamt if (nfound == maxresults) {
861 1.1 yamt return nfound;
862 1.1 yamt }
863 1.18 ad } else if (dense) {
864 1.18 ad return nfound;
865 1.1 yamt }
866 1.1 yamt scan_siblings:
867 1.1 yamt /*
868 1.15 yamt * try to find the next matching non-NULL sibling.
869 1.1 yamt */
870 1.15 yamt if (lastidx == 0) {
871 1.15 yamt /*
872 1.15 yamt * the root has no siblings.
873 1.15 yamt * we're done.
874 1.15 yamt */
875 1.15 yamt KASSERT(vpp == &t->t_root);
876 1.15 yamt break;
877 1.15 yamt }
878 1.1 yamt n = path_node(t, path, lastidx - 1);
879 1.15 yamt for (i = vpp - n->n_ptrs + step; i != guard; i += step) {
880 1.15 yamt KASSERT(i < RADIX_TREE_PTR_PER_NODE);
881 1.1 yamt if (entry_match_p(n->n_ptrs[i], tagmask)) {
882 1.1 yamt vpp = &n->n_ptrs[i];
883 1.1 yamt break;
884 1.23 ad } else if (dense) {
885 1.23 ad return nfound;
886 1.1 yamt }
887 1.1 yamt }
888 1.15 yamt if (i == guard) {
889 1.1 yamt /*
890 1.1 yamt * not found. go to parent.
891 1.1 yamt */
892 1.1 yamt lastidx--;
893 1.1 yamt vpp = path_pptr(t, path, lastidx);
894 1.1 yamt goto scan_siblings;
895 1.1 yamt }
896 1.15 yamt descend:
897 1.1 yamt /*
898 1.15 yamt * following the left-most (or right-most in the case of
899 1.28 andvar * reverse scan) child node, descend until reaching the leaf or
900 1.29 andvar * a non-matching entry.
901 1.1 yamt */
902 1.1 yamt while (entry_match_p(*vpp, tagmask) && lastidx < t->t_height) {
903 1.15 yamt /*
904 1.15 yamt * save vpp in the path so that we can come back to this
905 1.15 yamt * node after finishing visiting children.
906 1.15 yamt */
907 1.15 yamt path->p_refs[lastidx].pptr = vpp;
908 1.1 yamt n = entry_ptr(*vpp);
909 1.15 yamt vpp = &n->n_ptrs[first];
910 1.1 yamt lastidx++;
911 1.1 yamt }
912 1.1 yamt }
913 1.15 yamt return nfound;
914 1.1 yamt }
915 1.1 yamt
916 1.1 yamt /*
917 1.1 yamt * radix_tree_gang_lookup_node:
918 1.1 yamt *
919 1.18 ad * Scan the tree starting from the given index in ascending order and
920 1.18 ad * return the nodes found.
921 1.18 ad *
922 1.1 yamt * results should be an array large enough to hold maxresults pointers.
923 1.18 ad * This function returns the number of nodes found, up to maxresults.
924 1.18 ad * Returning less than maxresults means there are no more nodes in the tree.
925 1.1 yamt *
926 1.18 ad * If dense == true, this function stops scanning when it finds a hole in the
927 1.18 ad * indexes, i.e. an index for which radix_tree_lookup_node would return NULL.
928 1.18 ad * If dense == false, this function skips holes and continues scanning until
929 1.18 ad * maxresults nodes are found or it reaches the limit of the index range.
930 1.18 ad *
931 1.18 ad * The result of this function is semantically equivalent to what could be
932 1.1 yamt * obtained by repeated calls of radix_tree_lookup_node with increasing index,
933 1.18 ad * but this function is expected to be computationally cheaper when looking up
934 1.18 ad * multiple nodes at once, especially when node indexes are distributed
935 1.18 ad * sparsely.
936 1.18 ad *
937 1.18 ad * Note that this function doesn't return index values of found nodes.
938 1.18 ad * Thus, in the case of dense == false, if index values are important for
939 1.18 ad * a caller, it's the caller's responsibility to check them, typically
940 1.29 andvar * by examining the returned nodes using some caller-specific knowledge
941 1.18 ad * about them.
942 1.18 ad * In the case of dense == true, a node returned via results[N] is always for
943 1.18 ad * the index (idx + N).
944 1.1 yamt */
945 1.1 yamt
946 1.1 yamt unsigned int
947 1.1 yamt radix_tree_gang_lookup_node(struct radix_tree *t, uint64_t idx,
948 1.18 ad void **results, unsigned int maxresults, bool dense)
949 1.1 yamt {
950 1.1 yamt struct radix_tree_path path;
951 1.1 yamt
952 1.1 yamt gang_lookup_init(t, idx, &path, 0);
953 1.18 ad return gang_lookup_scan(t, &path, results, maxresults, 0, false, dense);
954 1.15 yamt }
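
/*
 * Example (editor's illustrative sketch, never compiled): the difference
 * between a sparse and a dense gang lookup. example_gang_lookup is
 * hypothetical and assumes leaves have already been inserted at indexes
 * 10, 11 and 20 and nowhere else.
 */
#if 0 /* illustrative sketch only */
static void
example_gang_lookup(struct radix_tree *t)
{
	void *results[8];
	unsigned int n __diagused;

	/* sparse scan: holes are skipped, all three leaves are returned */
	n = radix_tree_gang_lookup_node(t, 10, results,
	    __arraycount(results), false);
	KASSERT(n == 3);

	/* dense scan: stops at the hole at index 12; results[N] is index 10+N */
	n = radix_tree_gang_lookup_node(t, 10, results,
	    __arraycount(results), true);
	KASSERT(n == 2);
}
#endif /* illustrative sketch only */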
955 1.15 yamt
956 1.15 yamt /*
957 1.15 yamt * radix_tree_gang_lookup_node_reverse:
958 1.15 yamt *
959 1.18 ad * Same as radix_tree_gang_lookup_node except that this one scans the
960 1.18 ad * tree in reverse order, i.e. in descending index order.
961 1.15 yamt */
962 1.15 yamt
963 1.15 yamt unsigned int
964 1.15 yamt radix_tree_gang_lookup_node_reverse(struct radix_tree *t, uint64_t idx,
965 1.18 ad void **results, unsigned int maxresults, bool dense)
966 1.15 yamt {
967 1.15 yamt struct radix_tree_path path;
968 1.15 yamt
969 1.15 yamt gang_lookup_init(t, idx, &path, 0);
970 1.18 ad return gang_lookup_scan(t, &path, results, maxresults, 0, true, dense);
971 1.1 yamt }
972 1.1 yamt
973 1.1 yamt /*
974 1.1 yamt * radix_tree_gang_lookup_tagged_node:
975 1.1 yamt *
976 1.18 ad * Same as radix_tree_gang_lookup_node except that this one only returns
977 1.1 yamt * nodes with a tag in the given tagmask set.
978 1.18 ad *
979 1.18 ad * It's illegal to call this function with tagmask 0.
980 1.1 yamt */
981 1.1 yamt
982 1.1 yamt unsigned int
983 1.1 yamt radix_tree_gang_lookup_tagged_node(struct radix_tree *t, uint64_t idx,
984 1.18 ad void **results, unsigned int maxresults, bool dense, unsigned int tagmask)
985 1.1 yamt {
986 1.1 yamt struct radix_tree_path path;
987 1.1 yamt
988 1.18 ad KASSERT(tagmask != 0);
989 1.1 yamt gang_lookup_init(t, idx, &path, tagmask);
990 1.18 ad return gang_lookup_scan(t, &path, results, maxresults, tagmask, false,
991 1.18 ad dense);
992 1.15 yamt }
993 1.15 yamt
994 1.15 yamt /*
995 1.15 yamt * radix_tree_gang_lookup_tagged_node_reverse:
996 1.15 yamt *
997 1.18 ad * Same as radix_tree_gang_lookup_tagged_node except that this one scans the
998 1.18 ad * tree in reverse order, i.e. in descending index order.
999 1.15 yamt */
1000 1.15 yamt
1001 1.15 yamt unsigned int
1002 1.15 yamt radix_tree_gang_lookup_tagged_node_reverse(struct radix_tree *t, uint64_t idx,
1003 1.18 ad void **results, unsigned int maxresults, bool dense, unsigned int tagmask)
1004 1.15 yamt {
1005 1.15 yamt struct radix_tree_path path;
1006 1.15 yamt
1007 1.18 ad KASSERT(tagmask != 0);
1008 1.15 yamt gang_lookup_init(t, idx, &path, tagmask);
1009 1.18 ad return gang_lookup_scan(t, &path, results, maxresults, tagmask, true,
1010 1.18 ad dense);
1011 1.1 yamt }
1012 1.1 yamt
1013 1.4 yamt /*
1014 1.4 yamt * radix_tree_get_tag:
1015 1.4 yamt *
1016 1.18 ad * Return the subset of tagmask that is set on the node at the given index.
1017 1.18 ad *
1018 1.18 ad * It's illegal to call this function for a node which has not been inserted.
1019 1.4 yamt */
1020 1.4 yamt
1021 1.18 ad unsigned int
1022 1.18 ad radix_tree_get_tag(struct radix_tree *t, uint64_t idx, unsigned int tagmask)
1023 1.1 yamt {
1024 1.18 ad /*
1025 1.18 ad * the following two implementations should behave the same.
1026 1.18 ad * the former one was chosen because it seems faster.
1027 1.18 ad */
1028 1.1 yamt #if 1
1029 1.1 yamt void **vpp;
1030 1.1 yamt
1031 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, NULL, false, tagmask);
1032 1.1 yamt if (vpp == NULL) {
1033 1.1 yamt return false;
1034 1.1 yamt }
1035 1.1 yamt KASSERT(*vpp != NULL);
1036 1.18 ad return (entry_tagmask(*vpp) & tagmask);
1037 1.1 yamt #else
1038 1.1 yamt void **vpp;
1039 1.1 yamt
1040 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0);
1041 1.1 yamt KASSERT(vpp != NULL);
1042 1.18 ad return (entry_tagmask(*vpp) & tagmask);
1043 1.1 yamt #endif
1044 1.1 yamt }
1045 1.1 yamt
1046 1.4 yamt /*
1047 1.4 yamt * radix_tree_set_tag:
1048 1.4 yamt *
1049 1.18 ad * Set the tag for the node at the given index.
1050 1.18 ad *
1051 1.18 ad * It's illegal to call this function for a node which has not been inserted.
1052 1.18 ad * It's illegal to call this function with tagmask 0.
1053 1.4 yamt */
1054 1.4 yamt
1055 1.1 yamt void
1056 1.18 ad radix_tree_set_tag(struct radix_tree *t, uint64_t idx, unsigned int tagmask)
1057 1.1 yamt {
1058 1.1 yamt struct radix_tree_path path;
1059 1.18 ad void **vpp __unused;
1060 1.1 yamt int i;
1061 1.1 yamt
1062 1.18 ad KASSERT(tagmask != 0);
1063 1.19 ad vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0);
1064 1.1 yamt KASSERT(vpp != NULL);
1065 1.1 yamt KASSERT(*vpp != NULL);
1066 1.1 yamt KASSERT(path.p_lastidx == t->t_height);
1067 1.1 yamt KASSERT(vpp == path_pptr(t, &path, path.p_lastidx));
1068 1.1 yamt for (i = t->t_height; i >= 0; i--) {
1069 1.1 yamt void ** const pptr = (void **)path_pptr(t, &path, i);
1070 1.1 yamt void *entry;
1071 1.1 yamt
1072 1.1 yamt KASSERT(pptr != NULL);
1073 1.1 yamt entry = *pptr;
1074 1.1 yamt if ((entry_tagmask(entry) & tagmask) != 0) {
1075 1.1 yamt break;
1076 1.1 yamt }
1077 1.1 yamt *pptr = (void *)((uintptr_t)entry | tagmask);
1078 1.1 yamt }
1079 1.1 yamt }
1080 1.1 yamt
1081 1.4 yamt /*
1082 1.4 yamt * radix_tree_clear_tag:
1083 1.4 yamt *
1084 1.18 ad * Clear the tag for the node at the given index.
1085 1.18 ad *
1086 1.18 ad * It's illegal to call this function for a node which has not been inserted.
1087 1.18 ad * It's illegal to call this function with tagmask 0.
1088 1.4 yamt */
1089 1.4 yamt
1090 1.1 yamt void
1091 1.18 ad radix_tree_clear_tag(struct radix_tree *t, uint64_t idx, unsigned int tagmask)
1092 1.1 yamt {
1093 1.1 yamt struct radix_tree_path path;
1094 1.1 yamt void **vpp;
1095 1.1 yamt int i;
1096 1.1 yamt
1097 1.18 ad KASSERT(tagmask != 0);
1098 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0);
1099 1.1 yamt KASSERT(vpp != NULL);
1100 1.1 yamt KASSERT(*vpp != NULL);
1101 1.1 yamt KASSERT(path.p_lastidx == t->t_height);
1102 1.1 yamt KASSERT(vpp == path_pptr(t, &path, path.p_lastidx));
1103 1.7 yamt /*
1104 1.7 yamt * if already cleared, nothing to do
1105 1.7 yamt */
1106 1.1 yamt if ((entry_tagmask(*vpp) & tagmask) == 0) {
1107 1.1 yamt return;
1108 1.1 yamt }
1109 1.7 yamt /*
1110 1.7 yamt * clear the tag only if no children have the tag.
1111 1.7 yamt */
1112 1.1 yamt for (i = t->t_height; i >= 0; i--) {
1113 1.1 yamt void ** const pptr = (void **)path_pptr(t, &path, i);
1114 1.1 yamt void *entry;
1115 1.1 yamt
1116 1.1 yamt KASSERT(pptr != NULL);
1117 1.1 yamt entry = *pptr;
1118 1.1 yamt KASSERT((entry_tagmask(entry) & tagmask) != 0);
1119 1.1 yamt *pptr = entry_compose(entry_ptr(entry),
1120 1.1 yamt entry_tagmask(entry) & ~tagmask);
1121 1.7 yamt /*
1122 1.7 yamt * check if we should proceed to process the next level.
1123 1.7 yamt */
1124 1.7 yamt if (0 < i) {
1125 1.1 yamt struct radix_tree_node *n = path_node(t, &path, i - 1);
1126 1.1 yamt
1127 1.26 ad if ((radix_tree_sum_node(n) & tagmask) != 0) {
1128 1.1 yamt break;
1129 1.1 yamt }
1130 1.1 yamt }
1131 1.1 yamt }
1132 1.1 yamt }
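
/*
 * Example (editor's illustrative sketch, never compiled): the tag operations
 * used together on a single leaf. example_tags is hypothetical and assumes a
 * leaf has already been inserted at idx and that no other leaf in the tree
 * carries tag id 1.
 */
#if 0 /* illustrative sketch only */
static void
example_tags(struct radix_tree *t, uint64_t idx)
{
	const unsigned int tagmask = 1 << 1;	/* tag id 1 */

	KASSERT(radix_tree_get_tag(t, idx, tagmask) == 0);	/* starts clear */

	radix_tree_set_tag(t, idx, tagmask);
	KASSERT(radix_tree_get_tag(t, idx, tagmask) != 0);
	KASSERT(!radix_tree_empty_tagged_tree_p(t, tagmask));

	radix_tree_clear_tag(t, idx, tagmask);
	KASSERT(radix_tree_get_tag(t, idx, tagmask) == 0);
	KASSERT(radix_tree_empty_tagged_tree_p(t, tagmask));
}
#endif /* illustrative sketch only */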
1133 1.1 yamt
1134 1.1 yamt #if defined(UNITTEST)
1135 1.1 yamt
1136 1.1 yamt #include <inttypes.h>
1137 1.1 yamt #include <stdio.h>
1138 1.1 yamt
1139 1.1 yamt static void
1140 1.1 yamt radix_tree_dump_node(const struct radix_tree *t, void *vp,
1141 1.1 yamt uint64_t offset, unsigned int height)
1142 1.1 yamt {
1143 1.1 yamt struct radix_tree_node *n;
1144 1.1 yamt unsigned int i;
1145 1.1 yamt
1146 1.1 yamt for (i = 0; i < t->t_height - height; i++) {
1147 1.1 yamt printf(" ");
1148 1.1 yamt }
1149 1.1 yamt if (entry_tagmask(vp) == 0) {
1150 1.1 yamt printf("[%" PRIu64 "] %p", offset, entry_ptr(vp));
1151 1.1 yamt } else {
1152 1.1 yamt printf("[%" PRIu64 "] %p (tagmask=0x%x)", offset, entry_ptr(vp),
1153 1.1 yamt entry_tagmask(vp));
1154 1.1 yamt }
1155 1.1 yamt if (height == 0) {
1156 1.1 yamt printf(" (leaf)\n");
1157 1.1 yamt return;
1158 1.1 yamt }
1159 1.1 yamt n = entry_ptr(vp);
1160 1.26 ad assert((radix_tree_sum_node(n) & RADIX_TREE_TAG_MASK) ==
1161 1.24 ad entry_tagmask(vp));
1162 1.19 ad printf(" (%u children)\n", radix_tree_node_count_ptrs(n));
1163 1.1 yamt for (i = 0; i < __arraycount(n->n_ptrs); i++) {
1164 1.1 yamt void *c;
1165 1.1 yamt
1166 1.1 yamt c = n->n_ptrs[i];
1167 1.1 yamt if (c == NULL) {
1168 1.1 yamt continue;
1169 1.1 yamt }
1170 1.1 yamt radix_tree_dump_node(t, c,
1171 1.1 yamt offset + i * (UINT64_C(1) <<
1172 1.1 yamt (RADIX_TREE_BITS_PER_HEIGHT * (height - 1))), height - 1);
1173 1.1 yamt }
1174 1.1 yamt }
1175 1.1 yamt
1176 1.1 yamt void radix_tree_dump(const struct radix_tree *);
1177 1.1 yamt
1178 1.1 yamt void
1179 1.1 yamt radix_tree_dump(const struct radix_tree *t)
1180 1.1 yamt {
1181 1.1 yamt
1182 1.1 yamt printf("tree %p height=%u\n", t, t->t_height);
1183 1.1 yamt radix_tree_dump_node(t, t->t_root, 0, t->t_height);
1184 1.1 yamt }
1185 1.1 yamt
1186 1.1 yamt static void
1187 1.1 yamt test1(void)
1188 1.1 yamt {
1189 1.1 yamt struct radix_tree s;
1190 1.1 yamt struct radix_tree *t = &s;
1191 1.1 yamt void *results[3];
1192 1.1 yamt
1193 1.1 yamt radix_tree_init_tree(t);
1194 1.1 yamt radix_tree_dump(t);
1195 1.1 yamt assert(radix_tree_lookup_node(t, 0) == NULL);
1196 1.1 yamt assert(radix_tree_lookup_node(t, 1000) == NULL);
1197 1.18 ad assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 0);
1198 1.18 ad assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 0);
1199 1.18 ad assert(radix_tree_gang_lookup_node(t, 1000, results, 3, false) == 0);
1200 1.18 ad assert(radix_tree_gang_lookup_node(t, 1000, results, 3, true) == 0);
1201 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, false) ==
1202 1.18 ad 0);
1203 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, true) ==
1204 1.18 ad 0);
1205 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, false)
1206 1.18 ad == 0);
1207 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, true)
1208 1.18 ad == 0);
1209 1.18 ad assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, false, 1)
1210 1.18 ad == 0);
1211 1.18 ad assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, true, 1)
1212 1.18 ad == 0);
1213 1.18 ad assert(radix_tree_gang_lookup_tagged_node(t, 1000, results, 3, false, 1)
1214 1.18 ad == 0);
1215 1.18 ad assert(radix_tree_gang_lookup_tagged_node(t, 1000, results, 3, true, 1)
1216 1.15 yamt == 0);
1217 1.18 ad assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
1218 1.18 ad false, 1) == 0);
1219 1.18 ad assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
1220 1.18 ad true, 1) == 0);
1221 1.15 yamt assert(radix_tree_gang_lookup_tagged_node_reverse(t, 1000, results, 3,
1222 1.18 ad false, 1) == 0);
1223 1.18 ad assert(radix_tree_gang_lookup_tagged_node_reverse(t, 1000, results, 3,
1224 1.18 ad true, 1) == 0);
1225 1.15 yamt assert(radix_tree_empty_tree_p(t));
1226 1.16 yamt assert(radix_tree_empty_tagged_tree_p(t, 1));
1227 1.18 ad assert(radix_tree_empty_tagged_tree_p(t, 2));
1228 1.15 yamt assert(radix_tree_insert_node(t, 0, (void *)0xdeadbea0) == 0);
1229 1.15 yamt assert(!radix_tree_empty_tree_p(t));
1230 1.16 yamt assert(radix_tree_empty_tagged_tree_p(t, 1));
1231 1.18 ad assert(radix_tree_empty_tagged_tree_p(t, 2));
1232 1.15 yamt assert(radix_tree_lookup_node(t, 0) == (void *)0xdeadbea0);
1233 1.15 yamt assert(radix_tree_lookup_node(t, 1000) == NULL);
1234 1.15 yamt memset(results, 0, sizeof(results));
1235 1.18 ad assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 1);
1236 1.18 ad assert(results[0] == (void *)0xdeadbea0);
1237 1.18 ad memset(results, 0, sizeof(results));
1238 1.18 ad assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 1);
1239 1.15 yamt assert(results[0] == (void *)0xdeadbea0);
1240 1.18 ad assert(radix_tree_gang_lookup_node(t, 1000, results, 3, false) == 0);
1241 1.18 ad assert(radix_tree_gang_lookup_node(t, 1000, results, 3, true) == 0);
1242 1.15 yamt memset(results, 0, sizeof(results));
1243 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, false) ==
1244 1.18 ad 1);
1245 1.15 yamt assert(results[0] == (void *)0xdeadbea0);
1246 1.15 yamt memset(results, 0, sizeof(results));
1247 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, true) ==
1248 1.18 ad 1);
1249 1.15 yamt assert(results[0] == (void *)0xdeadbea0);
1250 1.18 ad memset(results, 0, sizeof(results));
1251 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, false)
1252 1.18 ad == 1);
1253 1.18 ad assert(results[0] == (void *)0xdeadbea0);
1254 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, true)
1255 1.18 ad == 0);
1256 1.18 ad assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, false, 1)
1257 1.15 yamt == 0);
1258 1.18 ad assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, true, 1)
1259 1.15 yamt == 0);
1260 1.18 ad assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
1261 1.18 ad false, 1) == 0);
1262 1.18 ad assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
1263 1.18 ad true, 1) == 0);
1264 1.1 yamt assert(radix_tree_insert_node(t, 1000, (void *)0xdeadbea0) == 0);
1265 1.15 yamt assert(radix_tree_remove_node(t, 0) == (void *)0xdeadbea0);
1266 1.15 yamt assert(!radix_tree_empty_tree_p(t));
1267 1.1 yamt radix_tree_dump(t);
1268 1.15 yamt assert(radix_tree_lookup_node(t, 0) == NULL);
1269 1.15 yamt assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
1270 1.15 yamt memset(results, 0, sizeof(results));
1271 1.18 ad assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 1);
1272 1.15 yamt assert(results[0] == (void *)0xdeadbea0);
1273 1.18 ad assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 0);
1274 1.15 yamt memset(results, 0, sizeof(results));
1275 1.18 ad assert(radix_tree_gang_lookup_node(t, 1000, results, 3, false) == 1);
1276 1.15 yamt assert(results[0] == (void *)0xdeadbea0);
1277 1.15 yamt memset(results, 0, sizeof(results));
1278 1.18 ad assert(radix_tree_gang_lookup_node(t, 1000, results, 3, true) == 1);
1279 1.15 yamt assert(results[0] == (void *)0xdeadbea0);
1280 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, false)
1281 1.15 yamt == 0);
1282 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, true)
1283 1.15 yamt == 0);
1284 1.18 ad memset(results, 0, sizeof(results));
1285 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, false)
1286 1.18 ad == 1);
1287 1.18 ad memset(results, 0, sizeof(results));
1288 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, true)
1289 1.18 ad == 1);
1290 1.18 ad assert(results[0] == (void *)0xdeadbea0);
1291 1.18 ad assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, false, 1)
1292 1.18 ad == 0);
1293 1.18 ad assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, true, 1)
1294 1.18 ad == 0);
1295 1.18 ad assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
1296 1.18 ad false, 1) == 0);
1297 1.18 ad assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
1298 1.18 ad true, 1) == 0);
1299 1.18 ad assert(!radix_tree_get_tag(t, 1000, 1));
1300 1.18 ad assert(!radix_tree_get_tag(t, 1000, 2));
1301 1.18 ad assert(radix_tree_get_tag(t, 1000, 2 | 1) == 0);
1302 1.18 ad assert(radix_tree_empty_tagged_tree_p(t, 1));
1303 1.18 ad assert(radix_tree_empty_tagged_tree_p(t, 2));
1304 1.18 ad radix_tree_set_tag(t, 1000, 2);
1305 1.1 yamt assert(!radix_tree_get_tag(t, 1000, 1));
1306 1.18 ad assert(radix_tree_get_tag(t, 1000, 2));
1307 1.18 ad assert(radix_tree_get_tag(t, 1000, 2 | 1) == 2);
1308 1.16 yamt assert(radix_tree_empty_tagged_tree_p(t, 1));
1309 1.18 ad assert(!radix_tree_empty_tagged_tree_p(t, 2));
1310 1.1 yamt radix_tree_dump(t);
1311 1.1 yamt assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
1312 1.1 yamt assert(radix_tree_insert_node(t, 0, (void *)0xbea0) == 0);
1313 1.1 yamt radix_tree_dump(t);
1314 1.1 yamt assert(radix_tree_lookup_node(t, 0) == (void *)0xbea0);
1315 1.1 yamt assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
1316 1.1 yamt assert(radix_tree_insert_node(t, UINT64_C(10000000000), (void *)0xdea0)
1317 1.1 yamt == 0);
1318 1.1 yamt radix_tree_dump(t);
1319 1.1 yamt assert(radix_tree_lookup_node(t, 0) == (void *)0xbea0);
1320 1.1 yamt assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
1321 1.1 yamt assert(radix_tree_lookup_node(t, UINT64_C(10000000000)) ==
1322 1.1 yamt (void *)0xdea0);
1323 1.1 yamt radix_tree_dump(t);
1324 1.18 ad assert(!radix_tree_get_tag(t, 0, 2));
1325 1.18 ad assert(radix_tree_get_tag(t, 1000, 2));
1326 1.1 yamt assert(!radix_tree_get_tag(t, UINT64_C(10000000000), 1));
1327 1.27 msaitoh radix_tree_set_tag(t, 0, 2);
1328 1.18 ad radix_tree_set_tag(t, UINT64_C(10000000000), 2);
1329 1.1 yamt radix_tree_dump(t);
1330 1.18 ad assert(radix_tree_get_tag(t, 0, 2));
1331 1.18 ad assert(radix_tree_get_tag(t, 1000, 2));
1332 1.18 ad assert(radix_tree_get_tag(t, UINT64_C(10000000000), 2));
1333 1.27 msaitoh radix_tree_clear_tag(t, 0, 2);
1334 1.18 ad radix_tree_clear_tag(t, UINT64_C(10000000000), 2);
1335 1.1 yamt radix_tree_dump(t);
1336 1.18 ad assert(!radix_tree_get_tag(t, 0, 2));
1337 1.18 ad assert(radix_tree_get_tag(t, 1000, 2));
1338 1.18 ad assert(!radix_tree_get_tag(t, UINT64_C(10000000000), 2));
1339 1.1 yamt radix_tree_dump(t);
1340 1.1 yamt assert(radix_tree_replace_node(t, 1000, (void *)0x12345678) ==
1341 1.1 yamt (void *)0xdeadbea0);
1342 1.18 ad assert(!radix_tree_get_tag(t, 1000, 1));
1343 1.18 ad assert(radix_tree_get_tag(t, 1000, 2));
1344 1.18 ad assert(radix_tree_get_tag(t, 1000, 2 | 1) == 2);
1345 1.18 ad memset(results, 0, sizeof(results));
1346 1.18 ad assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 3);
1347 1.1 yamt assert(results[0] == (void *)0xbea0);
1348 1.1 yamt assert(results[1] == (void *)0x12345678);
1349 1.1 yamt assert(results[2] == (void *)0xdea0);
1350 1.18 ad memset(results, 0, sizeof(results));
1351 1.18 ad assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 1);
1352 1.18 ad assert(results[0] == (void *)0xbea0);
1353 1.18 ad memset(results, 0, sizeof(results));
1354 1.18 ad assert(radix_tree_gang_lookup_node(t, 1, results, 3, false) == 2);
1355 1.1 yamt assert(results[0] == (void *)0x12345678);
1356 1.1 yamt assert(results[1] == (void *)0xdea0);
1357 1.18 ad assert(radix_tree_gang_lookup_node(t, 1, results, 3, true) == 0);
1358 1.18 ad memset(results, 0, sizeof(results));
1359 1.18 ad assert(radix_tree_gang_lookup_node(t, 1001, results, 3, false) == 1);
1360 1.1 yamt assert(results[0] == (void *)0xdea0);
1361 1.18 ad assert(radix_tree_gang_lookup_node(t, 1001, results, 3, true) == 0);
1362 1.18 ad assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000001), results, 3,
1363 1.18 ad false) == 0);
1364 1.18 ad assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000001), results, 3,
1365 1.18 ad true) == 0);
1366 1.18 ad assert(radix_tree_gang_lookup_node(t, UINT64_C(1000000000000), results,
1367 1.18 ad 3, false) == 0);
1368 1.1 yamt assert(radix_tree_gang_lookup_node(t, UINT64_C(1000000000000), results,
1369 1.18 ad 3, true) == 0);
1370 1.18 ad memset(results, 0, sizeof(results));
1371 1.18 ad assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 100, false, 2)
1372 1.18 ad == 1);
1373 1.1 yamt assert(results[0] == (void *)0x12345678);
1374 1.18 ad assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 100, true, 2)
1375 1.18 ad == 0);
1376 1.1 yamt assert(entry_tagmask(t->t_root) != 0);
1377 1.1 yamt assert(radix_tree_remove_node(t, 1000) == (void *)0x12345678);
1378 1.1 yamt assert(entry_tagmask(t->t_root) == 0);
1379 1.1 yamt radix_tree_dump(t);
1380 1.18 ad assert(radix_tree_insert_node(t, UINT64_C(10000000001), (void *)0xfff0)
1381 1.18 ad == 0);
1382 1.18 ad memset(results, 0, sizeof(results));
1383 1.18 ad assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000000), results, 3,
1384 1.18 ad false) == 2);
1385 1.18 ad assert(results[0] == (void *)0xdea0);
1386 1.18 ad assert(results[1] == (void *)0xfff0);
1387 1.18 ad memset(results, 0, sizeof(results));
1388 1.18 ad assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000000), results, 3,
1389 1.18 ad true) == 2);
1390 1.18 ad assert(results[0] == (void *)0xdea0);
1391 1.18 ad assert(results[1] == (void *)0xfff0);
1392 1.18 ad memset(results, 0, sizeof(results));
1393 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, UINT64_C(10000000001),
1394 1.18 ad results, 3, false) == 3);
1395 1.18 ad assert(results[0] == (void *)0xfff0);
1396 1.18 ad assert(results[1] == (void *)0xdea0);
1397 1.18 ad assert(results[2] == (void *)0xbea0);
1398 1.18 ad memset(results, 0, sizeof(results));
1399 1.18 ad assert(radix_tree_gang_lookup_node_reverse(t, UINT64_C(10000000001),
1400 1.18 ad results, 3, true) == 2);
1401 1.18 ad assert(results[0] == (void *)0xfff0);
1402 1.18 ad assert(results[1] == (void *)0xdea0);
1403 1.1 yamt assert(radix_tree_remove_node(t, UINT64_C(10000000000)) ==
1404 1.1 yamt (void *)0xdea0);
1405 1.18 ad assert(radix_tree_remove_node(t, UINT64_C(10000000001)) ==
1406 1.18 ad (void *)0xfff0);
1407 1.1 yamt radix_tree_dump(t);
1408 1.1 yamt assert(radix_tree_remove_node(t, 0) == (void *)0xbea0);
1409 1.1 yamt radix_tree_dump(t);
1410 1.1 yamt radix_tree_fini_tree(t);
1411 1.1 yamt }
1412 1.1 yamt
1413 1.1 yamt #include <sys/time.h>
1414 1.1 yamt
1415 1.1 yamt struct testnode {
1416 1.1 yamt uint64_t idx;
1417 1.12 yamt bool tagged[RADIX_TREE_TAG_ID_MAX];
1418 1.1 yamt };
1419 1.1 yamt
1420 1.1 yamt static void
1421 1.11 yamt printops(const char *title, const char *name, int tag, unsigned int n,
1422 1.11 yamt const struct timeval *stv, const struct timeval *etv)
1423 1.1 yamt {
1424 1.1 yamt uint64_t s = (uint64_t)stv->tv_sec * 1000000 + stv->tv_usec;
1425 1.1 yamt uint64_t e = (uint64_t)etv->tv_sec * 1000000 + etv->tv_usec;
1426 1.1 yamt
1427 1.11 yamt printf("RESULT %s %s %d %lf op/s\n", title, name, tag,
1428 1.11 yamt (double)n / (e - s) * 1000000);
1429 1.1 yamt }
1430 1.1 yamt
1431 1.1 yamt #define TEST2_GANG_LOOKUP_NODES 16
1432 1.1 yamt
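/*
 * Decide deterministically whether the i-th item gets the given tag:
 * every 4th item for tag 0 (25%), every 7th item otherwise (~14%).
 */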
1433 1.1 yamt static bool
1434 1.18 ad test2_should_tag(unsigned int i, unsigned int tagid)
1435 1.1 yamt {
1436 1.1 yamt
1437 1.1 yamt if (tagid == 0) {
1438 1.18 ad return (i % 4) == 0; /* 25% */
1439 1.1 yamt } else {
1440 1.11 yamt return (i % 7) == 0; /* 14% */
1441 1.1 yamt }
1443 1.18 ad }
1444 1.18 ad
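/*
 * Check a tagged-lookup result count against the per-tag totals: for a
 * single-bit tagmask the count must match exactly; for a multi-bit mask
 * each individual tag count is only a lower bound.
 */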
1445 1.18 ad static void
1446 1.18 ad check_tag_count(const unsigned int *ntagged, unsigned int tagmask,
1447 1.18 ad unsigned int count)
1448 1.18 ad {
1449 1.18 ad unsigned int tag;
1450 1.18 ad
1451 1.18 ad for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
1452 1.18 ad if ((tagmask & (1 << tag)) == 0) {
1453 1.18 ad continue;
1454 1.18 ad }
1455 1.18 ad if (((tagmask - 1) & tagmask) == 0) {
1456 1.18 ad assert(count == ntagged[tag]);
1457 1.18 ad } else {
1458 1.18 ad assert(count >= ntagged[tag]);
1459 1.18 ad }
1460 1.18 ad }
1461 1.1 yamt }
1462 1.1 yamt
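/*
 * Randomized benchmark: insert nnodes entries with random (optionally dense)
 * keys, tag a subset of them, and time lookups, tag operations, removal,
 * re-insertion and the gang-lookup variants.
 */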
1463 1.1 yamt static void
1464 1.11 yamt test2(const char *title, bool dense)
1465 1.1 yamt {
1466 1.1 yamt struct radix_tree s;
1467 1.1 yamt struct radix_tree *t = &s;
1468 1.1 yamt struct testnode *n;
1469 1.1 yamt unsigned int i;
1470 1.1 yamt unsigned int nnodes = 100000;
1471 1.1 yamt unsigned int removed;
1472 1.18 ad unsigned int tag;
1473 1.18 ad unsigned int tagmask;
1474 1.1 yamt unsigned int ntagged[RADIX_TREE_TAG_ID_MAX];
1475 1.1 yamt struct testnode *nodes;
1476 1.1 yamt struct timeval stv;
1477 1.1 yamt struct timeval etv;
1478 1.1 yamt
1479 1.1 yamt nodes = malloc(nnodes * sizeof(*nodes));
1480 1.1 yamt for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
1481 1.1 yamt ntagged[tag] = 0;
1482 1.1 yamt }
1483 1.1 yamt radix_tree_init_tree(t);
1484 1.1 yamt for (i = 0; i < nnodes; i++) {
1485 1.1 yamt n = &nodes[i];
1486 1.1 yamt n->idx = random();
1487 1.1 yamt if (sizeof(long) == 4) {
1488 1.1 yamt n->idx <<= 32;
1489 1.1 yamt n->idx |= (uint32_t)random();
1490 1.1 yamt }
1491 1.1 yamt if (dense) {
1492 1.1 yamt n->idx %= nnodes * 2;
1493 1.1 yamt }
1494 1.1 yamt while (radix_tree_lookup_node(t, n->idx) != NULL) {
1495 1.1 yamt n->idx++;
1496 1.1 yamt }
1497 1.1 yamt radix_tree_insert_node(t, n->idx, n);
1498 1.1 yamt for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
1499 1.18 ad tagmask = 1 << tag;
1500 1.18 ad
1501 1.12 yamt n->tagged[tag] = test2_should_tag(i, tag);
1502 1.12 yamt if (n->tagged[tag]) {
1503 1.18 ad radix_tree_set_tag(t, n->idx, tagmask);
1504 1.1 yamt ntagged[tag]++;
1505 1.1 yamt }
1506 1.18 ad assert((n->tagged[tag] ? tagmask : 0) ==
1507 1.18 ad radix_tree_get_tag(t, n->idx, tagmask));
1508 1.1 yamt }
1509 1.1 yamt }
1510 1.1 yamt
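/* Time a plain lookup of every inserted key. */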
1511 1.1 yamt gettimeofday(&stv, NULL);
1512 1.1 yamt for (i = 0; i < nnodes; i++) {
1513 1.1 yamt n = &nodes[i];
1514 1.1 yamt assert(radix_tree_lookup_node(t, n->idx) == n);
1515 1.1 yamt }
1516 1.1 yamt gettimeofday(&etv, NULL);
1517 1.11 yamt printops(title, "lookup", 0, nnodes, &stv, &etv);
1518 1.1 yamt
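/*
 * For every possible tagmask, time radix_tree_get_tag over all keys and
 * verify the result against the recorded per-item tag state.
 */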
1519 1.18 ad for (tagmask = 1; tagmask <= RADIX_TREE_TAG_MASK; tagmask ++) {
1520 1.12 yamt unsigned int count = 0;
1521 1.12 yamt
1522 1.1 yamt gettimeofday(&stv, NULL);
1523 1.1 yamt for (i = 0; i < nnodes; i++) {
1524 1.18 ad unsigned int tagged;
1525 1.12 yamt
1526 1.1 yamt n = &nodes[i];
1527 1.18 ad tagged = radix_tree_get_tag(t, n->idx, tagmask);
1528 1.18 ad assert((tagged & ~tagmask) == 0);
1529 1.18 ad for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
1530 1.18 ad assert((tagmask & (1 << tag)) == 0 ||
1531 1.18 ad n->tagged[tag] == !!(tagged & (1 << tag)));
1532 1.18 ad }
1533 1.12 yamt if (tagged) {
1534 1.12 yamt count++;
1535 1.12 yamt }
1536 1.1 yamt }
1537 1.1 yamt gettimeofday(&etv, NULL);
1538 1.18 ad check_tag_count(ntagged, tagmask, count);
1539 1.18 ad printops(title, "get_tag", tagmask, nnodes, &stv, &etv);
1540 1.1 yamt }
1541 1.1 yamt
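/* Time removal of every node. */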
1542 1.1 yamt gettimeofday(&stv, NULL);
1543 1.1 yamt for (i = 0; i < nnodes; i++) {
1544 1.1 yamt n = &nodes[i];
1545 1.1 yamt radix_tree_remove_node(t, n->idx);
1546 1.1 yamt }
1547 1.1 yamt gettimeofday(&etv, NULL);
1548 1.11 yamt printops(title, "remove", 0, nnodes, &stv, &etv);
1549 1.1 yamt
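/* Re-insert every node to time insertion. */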
1550 1.1 yamt gettimeofday(&stv, NULL);
1551 1.1 yamt for (i = 0; i < nnodes; i++) {
1552 1.1 yamt n = &nodes[i];
1553 1.1 yamt radix_tree_insert_node(t, n->idx, n);
1554 1.1 yamt }
1555 1.1 yamt gettimeofday(&etv, NULL);
1556 1.11 yamt printops(title, "insert", 0, nnodes, &stv, &etv);
1557 1.1 yamt
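/* Re-apply the recorded tags and time radix_tree_set_tag. */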
1558 1.1 yamt for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
1559 1.18 ad tagmask = 1 << tag;
1560 1.18 ad
1561 1.1 yamt ntagged[tag] = 0;
1562 1.1 yamt gettimeofday(&stv, NULL);
1563 1.1 yamt for (i = 0; i < nnodes; i++) {
1564 1.1 yamt n = &nodes[i];
1565 1.12 yamt if (n->tagged[tag]) {
1566 1.18 ad radix_tree_set_tag(t, n->idx, tagmask);
1567 1.1 yamt ntagged[tag]++;
1568 1.1 yamt }
1569 1.1 yamt }
1570 1.1 yamt gettimeofday(&etv, NULL);
1571 1.11 yamt printops(title, "set_tag", tag, ntagged[tag], &stv, &etv);
1572 1.1 yamt }
1573 1.1 yamt
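/*
 * Walk the whole tree forward in batches of TEST2_GANG_LOOKUP_NODES and
 * check that all nnodes entries are found.
 */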
1574 1.1 yamt gettimeofday(&stv, NULL);
1575 1.1 yamt {
1576 1.1 yamt struct testnode *results[TEST2_GANG_LOOKUP_NODES];
1577 1.1 yamt uint64_t nextidx;
1578 1.1 yamt unsigned int nfound;
1579 1.1 yamt unsigned int total;
1580 1.1 yamt
1581 1.1 yamt nextidx = 0;
1582 1.1 yamt total = 0;
1583 1.1 yamt while ((nfound = radix_tree_gang_lookup_node(t, nextidx,
1584 1.18 ad (void *)results, __arraycount(results), false)) > 0) {
1585 1.1 yamt nextidx = results[nfound - 1]->idx + 1;
1586 1.1 yamt total += nfound;
1587 1.15 yamt if (nextidx == 0) {
1588 1.15 yamt break;
1589 1.15 yamt }
1590 1.1 yamt }
1591 1.1 yamt assert(total == nnodes);
1592 1.1 yamt }
1593 1.1 yamt gettimeofday(&etv, NULL);
1594 1.11 yamt printops(title, "ganglookup", 0, nnodes, &stv, &etv);
1595 1.1 yamt
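/* The same walk in reverse, starting from UINT64_MAX. */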
1596 1.15 yamt gettimeofday(&stv, NULL);
1597 1.15 yamt {
1598 1.15 yamt struct testnode *results[TEST2_GANG_LOOKUP_NODES];
1599 1.15 yamt uint64_t nextidx;
1600 1.15 yamt unsigned int nfound;
1601 1.15 yamt unsigned int total;
1602 1.15 yamt
1603 1.15 yamt nextidx = UINT64_MAX;
1604 1.15 yamt total = 0;
1605 1.15 yamt while ((nfound = radix_tree_gang_lookup_node_reverse(t, nextidx,
1606 1.18 ad (void *)results, __arraycount(results), false)) > 0) {
1607 1.15 yamt nextidx = results[nfound - 1]->idx - 1;
1608 1.15 yamt total += nfound;
1609 1.15 yamt if (nextidx == UINT64_MAX) {
1610 1.15 yamt break;
1611 1.15 yamt }
1612 1.15 yamt }
1613 1.15 yamt assert(total == nnodes);
1614 1.15 yamt }
1615 1.15 yamt gettimeofday(&etv, NULL);
1616 1.15 yamt printops(title, "ganglookup_reverse", 0, nnodes, &stv, &etv);
1617 1.15 yamt
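/*
 * Tagged forward walk for every tagmask; the totals are checked against the
 * per-tag counts.
 */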
1618 1.18 ad for (tagmask = 1; tagmask <= RADIX_TREE_TAG_MASK; tagmask ++) {
1619 1.18 ad unsigned int total = 0;
1620 1.18 ad
1621 1.1 yamt gettimeofday(&stv, NULL);
1622 1.1 yamt {
1623 1.1 yamt struct testnode *results[TEST2_GANG_LOOKUP_NODES];
1624 1.1 yamt uint64_t nextidx;
1625 1.1 yamt unsigned int nfound;
1626 1.1 yamt
1627 1.1 yamt nextidx = 0;
1628 1.1 yamt while ((nfound = radix_tree_gang_lookup_tagged_node(t,
1629 1.1 yamt nextidx, (void *)results, __arraycount(results),
1630 1.18 ad false, tagmask)) > 0) {
1631 1.1 yamt nextidx = results[nfound - 1]->idx + 1;
1632 1.1 yamt total += nfound;
1633 1.1 yamt }
1634 1.1 yamt }
1635 1.1 yamt gettimeofday(&etv, NULL);
1636 1.18 ad check_tag_count(ntagged, tagmask, total);
1637 1.18 ad assert(tagmask != 0 || total == 0);
1638 1.18 ad printops(title, "ganglookup_tag", tagmask, total, &stv, &etv);
1639 1.1 yamt }
1640 1.1 yamt
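/* The tagged walk again, in reverse. */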
1641 1.18 ad for (tagmask = 1; tagmask <= RADIX_TREE_TAG_MASK; tagmask ++) {
1642 1.18 ad unsigned int total = 0;
1643 1.18 ad
1644 1.15 yamt gettimeofday(&stv, NULL);
1645 1.15 yamt {
1646 1.15 yamt struct testnode *results[TEST2_GANG_LOOKUP_NODES];
1647 1.15 yamt uint64_t nextidx;
1648 1.15 yamt unsigned int nfound;
1649 1.15 yamt
1650 1.15 yamt nextidx = UINT64_MAX;
1651 1.15 yamt while ((nfound =
1652 1.15 yamt radix_tree_gang_lookup_tagged_node_reverse(t,
1653 1.15 yamt nextidx, (void *)results, __arraycount(results),
1654 1.18 ad false, tagmask)) > 0) {
1655 1.15 yamt nextidx = results[nfound - 1]->idx - 1;
1656 1.15 yamt total += nfound;
1657 1.15 yamt if (nextidx == UINT64_MAX) {
1658 1.15 yamt break;
1659 1.15 yamt }
1660 1.15 yamt }
1661 1.15 yamt }
1662 1.15 yamt gettimeofday(&etv, NULL);
1663 1.18 ad check_tag_count(ntagged, tagmask, total);
1664 1.18 ad assert(tagmask != 0 || total == 0);
1665 1.18 ad printops(title, "ganglookup_tag_reverse", tagmask, total,
1666 1.15 yamt &stv, &etv);
1667 1.15 yamt }
1668 1.15 yamt
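/*
 * Remove the tagged nodes while walking.  A node removed for an earlier tag
 * may also carry a later tag, so only the first pass can expect an exact
 * count; later passes can find at most ntagged[tag] nodes.
 */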
1669 1.1 yamt removed = 0;
1670 1.1 yamt for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
1671 1.1 yamt unsigned int total;
1672 1.1 yamt
1673 1.1 yamt total = 0;
1674 1.18 ad tagmask = 1 << tag;
1675 1.1 yamt gettimeofday(&stv, NULL);
1676 1.1 yamt {
1677 1.1 yamt struct testnode *results[TEST2_GANG_LOOKUP_NODES];
1678 1.1 yamt uint64_t nextidx;
1679 1.1 yamt unsigned int nfound;
1680 1.1 yamt
1681 1.1 yamt nextidx = 0;
1682 1.1 yamt while ((nfound = radix_tree_gang_lookup_tagged_node(t,
1683 1.1 yamt nextidx, (void *)results, __arraycount(results),
1684 1.18 ad false, tagmask)) > 0) {
1685 1.1 yamt for (i = 0; i < nfound; i++) {
1686 1.1 yamt radix_tree_remove_node(t,
1687 1.1 yamt results[i]->idx);
1688 1.1 yamt }
1689 1.1 yamt nextidx = results[nfound - 1]->idx + 1;
1690 1.1 yamt total += nfound;
1691 1.15 yamt if (nextidx == 0) {
1692 1.15 yamt break;
1693 1.15 yamt }
1694 1.1 yamt }
1695 1.18 ad }
1696 1.18 ad gettimeofday(&etv, NULL);
1697 1.18 ad if (tag == 0) {
1698 1.18 ad check_tag_count(ntagged, tagmask, total);
1699 1.18 ad } else {
1700 1.1 yamt assert(total <= ntagged[tag]);
1701 1.1 yamt }
1702 1.18 ad printops(title, "ganglookup_tag+remove", tagmask, total, &stv,
1703 1.11 yamt &etv);
1704 1.1 yamt removed += total;
1705 1.1 yamt }
1706 1.1 yamt
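/* Remove whatever is left via a forward gang lookup and verify the count. */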
1707 1.1 yamt gettimeofday(&stv, NULL);
1708 1.1 yamt {
1709 1.1 yamt struct testnode *results[TEST2_GANG_LOOKUP_NODES];
1710 1.1 yamt uint64_t nextidx;
1711 1.1 yamt unsigned int nfound;
1712 1.1 yamt unsigned int total;
1713 1.1 yamt
1714 1.1 yamt nextidx = 0;
1715 1.1 yamt total = 0;
1716 1.1 yamt while ((nfound = radix_tree_gang_lookup_node(t, nextidx,
1717 1.18 ad (void *)results, __arraycount(results), false)) > 0) {
1718 1.1 yamt for (i = 0; i < nfound; i++) {
1719 1.1 yamt assert(results[i] == radix_tree_remove_node(t,
1720 1.1 yamt results[i]->idx));
1721 1.1 yamt }
1722 1.1 yamt nextidx = results[nfound - 1]->idx + 1;
1723 1.1 yamt total += nfound;
1724 1.15 yamt if (nextidx == 0) {
1725 1.15 yamt break;
1726 1.15 yamt }
1727 1.1 yamt }
1728 1.1 yamt assert(total == nnodes - removed);
1729 1.1 yamt }
1730 1.1 yamt gettimeofday(&etv, NULL);
1731 1.11 yamt printops(title, "ganglookup+remove", 0, nnodes - removed, &stv, &etv);
1732 1.1 yamt
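/* The tree must now be completely empty, with no tags set. */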
1733 1.16 yamt assert(radix_tree_empty_tree_p(t));
1734 1.18 ad for (tagmask = 1; tagmask <= RADIX_TREE_TAG_MASK; tagmask ++) {
1735 1.18 ad assert(radix_tree_empty_tagged_tree_p(t, tagmask));
1736 1.18 ad }
1737 1.1 yamt radix_tree_fini_tree(t);
1738 1.1 yamt free(nodes);
1739 1.1 yamt }
1740 1.1 yamt
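/*
 * Userland unit test entry point (compiled only when UNITTEST is defined):
 * run the functional test and the dense and sparse benchmarks.
 */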
1741 1.1 yamt int
1742 1.1 yamt main(int argc, char *argv[])
1743 1.1 yamt {
1744 1.1 yamt
1745 1.1 yamt test1();
1746 1.11 yamt test2("dense", true);
1747 1.11 yamt test2("sparse", false);
1748 1.1 yamt return 0;
1749 1.1 yamt }
1750 1.1 yamt
1751 1.1 yamt #endif /* defined(UNITTEST) */
1752