/*	$NetBSD: radixtree.c,v 1.17.2.4 2012/08/01 21:09:27 yamt Exp $	*/

/*-
 * Copyright (c)2011,2012 YAMAMOTO Takashi,
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

/*
 * radixtree.c
 *
 * Overview:
 *
 * This is an implementation of a radix tree, whose keys are uint64_t and
 * whose leaves are user-provided pointers.
 *
 * Leaf nodes are just void * and this implementation doesn't care about
 * what they actually point to.  However, this implementation has an
 * assumption about their alignment.  Specifically, this implementation
 * assumes that their 2 LSBs are always zero and uses them for internal
 * accounting.
 *
 * Intermediate nodes and memory allocation:
 *
 * Intermediate nodes are automatically allocated and freed internally and
 * basically users don't need to care about them.  The allocation is done via
 * pool_cache_get(9) for _KERNEL, malloc(3) for userland, and alloc() for
 * the _STANDALONE environment.  Only the radix_tree_insert_node function can
 * allocate memory for intermediate nodes and thus can fail with ENOMEM.
 *
 * Efficiency:
 *
 * It's designed to work efficiently with a dense index distribution.
 * The memory consumption (number of necessary intermediate nodes) heavily
 * depends on the index distribution.  Basically, a denser index distribution
 * consumes fewer nodes per item.  Approximately,
 * - the best case: about RADIX_TREE_PTR_PER_NODE items per intermediate node.
 * - the worst case: RADIX_TREE_MAX_HEIGHT intermediate nodes per item.
 *
 * The height of the tree is dynamic.  It's smaller if only small index values
 * are used.  As an extreme case, if only index 0 is used, the corresponding
 * value is directly stored in the root of the tree (struct radix_tree)
 * without allocating any intermediate nodes.
 *
 * Gang lookup:
 *
 * This implementation provides a way to scan many nodes quickly via the
 * radix_tree_gang_lookup_node function and its variants.
 *
 * Tags:
 *
 * This implementation provides tagging functionality, which allows quick
 * scanning of a subset of leaf nodes.  Leaf nodes are untagged when inserted
 * into the tree and can be tagged by the radix_tree_set_tag function.
 * The radix_tree_gang_lookup_tagged_node function and its variants return
 * only leaf nodes with the given tag.  To reduce the number of nodes these
 * functions need to visit, this implementation keeps tagging information in
 * internal intermediate nodes and quickly skips uninteresting parts of the
 * tree.
 *
 * A tree has RADIX_TREE_TAG_ID_MAX independent tag spaces, each of which is
 * identified by a zero-origin number, tagid.  For the current implementation,
 * RADIX_TREE_TAG_ID_MAX is 2.  A set of tags is described as a bitmask
 * tagmask, which is a bitwise OR of (1 << tagid).
 */
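
/*
 * Usage example:
 *
 * The following is an illustrative sketch, not code used by this file; the
 * names "t" and "p" and the ENOMEM handling strategy are only placeholders.
 *
 *	struct radix_tree t;
 *	void *p = ...;		(non-NULL, at least 4-byte aligned)
 *	int error;
 *
 *	radix_tree_init_tree(&t);
 *	error = radix_tree_insert_node(&t, 123, p);
 *	if (error != 0) {
 *		KASSERT(error == ENOMEM);
 *		... back off and retry, or report the failure ...
 *	}
 *	KASSERT(radix_tree_lookup_node(&t, 123) == p);
 *	p = radix_tree_remove_node(&t, 123);
 *	radix_tree_fini_tree(&t);	(the tree must be empty by now)
 */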

#include <sys/cdefs.h>

#if defined(_KERNEL) || defined(_STANDALONE)
__KERNEL_RCSID(0, "$NetBSD: radixtree.c,v 1.17.2.4 2012/08/01 21:09:27 yamt Exp $");
#include <sys/param.h>
#include <sys/errno.h>
#include <sys/pool.h>
#include <sys/radixtree.h>
#include <lib/libkern/libkern.h>
#if defined(_STANDALONE)
#include <lib/libsa/stand.h>
#endif /* defined(_STANDALONE) */
#else /* defined(_KERNEL) || defined(_STANDALONE) */
__RCSID("$NetBSD: radixtree.c,v 1.17.2.4 2012/08/01 21:09:27 yamt Exp $");
#include <assert.h>
#include <errno.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#if 1
#define KASSERT assert
#else
#define KASSERT(a) /* nothing */
#endif
#endif /* defined(_KERNEL) || defined(_STANDALONE) */

#include <sys/radixtree.h>

#define	RADIX_TREE_BITS_PER_HEIGHT	4	/* XXX tune */
#define	RADIX_TREE_PTR_PER_NODE		(1 << RADIX_TREE_BITS_PER_HEIGHT)
#define	RADIX_TREE_MAX_HEIGHT		(64 / RADIX_TREE_BITS_PER_HEIGHT)
#define	RADIX_TREE_INVALID_HEIGHT	(RADIX_TREE_MAX_HEIGHT + 1)
__CTASSERT((64 % RADIX_TREE_BITS_PER_HEIGHT) == 0);

__CTASSERT(((1 << RADIX_TREE_TAG_ID_MAX) & (sizeof(int) - 1)) == 0);
#define	RADIX_TREE_TAG_MASK	((1 << RADIX_TREE_TAG_ID_MAX) - 1)

static inline void *
entry_ptr(void *p)
{

	return (void *)((uintptr_t)p & ~RADIX_TREE_TAG_MASK);
}

static inline unsigned int
entry_tagmask(void *p)
{

	return (uintptr_t)p & RADIX_TREE_TAG_MASK;
}

static inline void *
entry_compose(void *p, unsigned int tagmask)
{

	return (void *)((uintptr_t)p | tagmask);
}

static inline bool
entry_match_p(void *p, unsigned int tagmask)
{

	KASSERT(entry_ptr(p) != NULL || entry_tagmask(p) == 0);
	if (p == NULL) {
		return false;
	}
	if (tagmask == 0) {
		return true;
	}
	return (entry_tagmask(p) & tagmask) != 0;
}
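
/*
 * For illustration only (a sketch, not referenced by the code below):
 * composing an entry keeps the pointer and the tag bits separable, e.g.
 *
 *	void *p = ...;				(4-byte aligned leaf pointer)
 *	void *e = entry_compose(p, 0x1);	(tag id 0 set)
 *
 *	entry_ptr(e) == p
 *	entry_tagmask(e) == 0x1
 *	entry_match_p(e, 0x1) == true
 *	entry_match_p(e, 0x2) == false		(tag id 1 is not set)
 */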

/*
 * radix_tree_node: an intermediate node
 *
 * we don't care about the type of leaf nodes.  they are just void *.
 */

struct radix_tree_node {
	void *n_ptrs[RADIX_TREE_PTR_PER_NODE];
	unsigned int n_nptrs;	/* # of non-NULL pointers in n_ptrs */
};

/*
 * any_children_tagmask:
 *
 * return the OR'ed tagmask of the given node's children.
 */

static unsigned int
any_children_tagmask(const struct radix_tree_node *n)
{
	unsigned int mask;
	int i;

	mask = 0;
	for (i = 0; i < RADIX_TREE_PTR_PER_NODE; i++) {
		mask |= (unsigned int)(uintptr_t)n->n_ptrs[i];
	}
	return mask & RADIX_TREE_TAG_MASK;
}

/*
 * p_refs[0].pptr == &t->t_root
 * :
 * p_refs[n].pptr == &(*p_refs[n-1])->n_ptrs[x]
 * :
 * :
 * p_refs[t->t_height].pptr == &leaf_pointer
 */

struct radix_tree_path {
	struct radix_tree_node_ref {
		void **pptr;
	} p_refs[RADIX_TREE_MAX_HEIGHT + 1]; /* +1 for the root ptr */
	/*
	 * p_lastidx is either the index of the last valid element of p_refs[]
	 * or RADIX_TREE_INVALID_HEIGHT.
	 * RADIX_TREE_INVALID_HEIGHT means that radix_tree_lookup_ptr found
	 * that the height of the tree is not enough to cover the given index.
	 */
	unsigned int p_lastidx;
};

static inline void **
path_pptr(const struct radix_tree *t, const struct radix_tree_path *p,
    unsigned int height)
{

	KASSERT(height <= t->t_height);
	return p->p_refs[height].pptr;
}

static inline struct radix_tree_node *
path_node(const struct radix_tree *t, const struct radix_tree_path *p,
    unsigned int height)
{

	KASSERT(height <= t->t_height);
	return entry_ptr(*path_pptr(t, p, height));
}

/*
 * radix_tree_init_tree:
 *
 * Initialize a tree.
 */

void
radix_tree_init_tree(struct radix_tree *t)
{

	t->t_height = 0;
	t->t_root = NULL;
}

/*
 * radix_tree_fini_tree:
 *
 * Finish using a tree.
 */

void
radix_tree_fini_tree(struct radix_tree *t)
{

	KASSERT(t->t_root == NULL);
	KASSERT(t->t_height == 0);
}

/*
 * radix_tree_empty_tree_p:
 *
 * Return true if the tree is empty.
 */

bool
radix_tree_empty_tree_p(struct radix_tree *t)
{

	return t->t_root == NULL;
}

/*
 * radix_tree_empty_tagged_tree_p:
 *
 * Return true if the tree has no nodes with the given tags.  Otherwise
 * return false.
 *
 * It's illegal to call this function with tagmask 0.
 */

bool
radix_tree_empty_tagged_tree_p(struct radix_tree *t, unsigned int tagmask)
{

	KASSERT(tagmask != 0);
	return (entry_tagmask(t->t_root) & tagmask) == 0;
}

static void
radix_tree_node_init(struct radix_tree_node *n)
{

	memset(n, 0, sizeof(*n));
}

#if defined(_KERNEL)
pool_cache_t radix_tree_node_cache __read_mostly;

static int
radix_tree_node_ctor(void *dummy, void *item, int flags)
{
	struct radix_tree_node *n = item;

	KASSERT(dummy == NULL);
	radix_tree_node_init(n);
	return 0;
}

/*
 * radix_tree_init:
 *
 * initialize the subsystem.
 */

void
radix_tree_init(void)
{

	radix_tree_node_cache = pool_cache_init(sizeof(struct radix_tree_node),
	    0, 0, 0, "radix_tree_node", NULL, IPL_NONE, radix_tree_node_ctor,
	    NULL, NULL);
	KASSERT(radix_tree_node_cache != NULL);
}
#endif /* defined(_KERNEL) */

static bool __unused
radix_tree_node_clean_p(const struct radix_tree_node *n)
{
	unsigned int i;

	if (n->n_nptrs != 0) {
		return false;
	}
	for (i = 0; i < RADIX_TREE_PTR_PER_NODE; i++) {
		if (n->n_ptrs[i] != NULL) {
			return false;
		}
	}
	return true;
}

static struct radix_tree_node *
radix_tree_alloc_node(void)
{
	struct radix_tree_node *n;

#if defined(_KERNEL)
	/*
	 * note that pool_cache_get can block.
	 */
	n = pool_cache_get(radix_tree_node_cache, PR_NOWAIT);
#else /* defined(_KERNEL) */
#if defined(_STANDALONE)
	n = alloc(sizeof(*n));
#else /* defined(_STANDALONE) */
	n = malloc(sizeof(*n));
#endif /* defined(_STANDALONE) */
	if (n != NULL) {
		radix_tree_node_init(n);
	}
#endif /* defined(_KERNEL) */
	KASSERT(n == NULL || radix_tree_node_clean_p(n));
	return n;
}

static void
radix_tree_free_node(struct radix_tree_node *n)
{

	KASSERT(radix_tree_node_clean_p(n));
#if defined(_KERNEL)
	pool_cache_put(radix_tree_node_cache, n);
#elif defined(_STANDALONE)
	dealloc(n, sizeof(*n));
#else
	free(n);
#endif
}

static int
radix_tree_grow(struct radix_tree *t, unsigned int newheight)
{
	const unsigned int tagmask = entry_tagmask(t->t_root);

	KASSERT(newheight <= 64 / RADIX_TREE_BITS_PER_HEIGHT);
	if (t->t_root == NULL) {
		t->t_height = newheight;
		return 0;
	}
	while (t->t_height < newheight) {
		struct radix_tree_node *n;

		n = radix_tree_alloc_node();
		if (n == NULL) {
			/*
			 * don't bother to revert our changes.
			 * the caller will likely retry.
			 */
			return ENOMEM;
		}
		n->n_nptrs = 1;
		n->n_ptrs[0] = t->t_root;
		t->t_root = entry_compose(n, tagmask);
		t->t_height++;
	}
	return 0;
}

/*
 * radix_tree_lookup_ptr:
 *
 * an internal helper function used by various exported functions.
 *
 * return the pointer to the slot which stores the node for the given index.
 *
 * if alloc is true, try to allocate the storage.  (note for _KERNEL:
 * in that case, this function can block.)  if the allocation fails, or if
 * alloc is false and the storage hasn't been allocated, return NULL.
 *
 * if path is not NULL, fill it for the caller's investigation.
 *
 * if tagmask is not zero, search only for nodes with the tag set.
 * note that, however, this function doesn't check the tagmask for the leaf
 * pointer.  it's the caller's responsibility to examine the value pointed to
 * by the returned pointer if necessary.
 *
 * while this function is a bit large, it's called with some constant
 * arguments, so inlining might have benefits.  anyway, the compiler will
 * decide.
 */

static inline void **
radix_tree_lookup_ptr(struct radix_tree *t, uint64_t idx,
    struct radix_tree_path *path, bool alloc, const unsigned int tagmask)
{
	struct radix_tree_node *n;
	int hshift = RADIX_TREE_BITS_PER_HEIGHT * t->t_height;
	int shift;
	void **vpp;
	const uint64_t mask = (UINT64_C(1) << RADIX_TREE_BITS_PER_HEIGHT) - 1;
	struct radix_tree_node_ref *refs = NULL;

	/*
	 * check unsupported combinations
	 */
	KASSERT(tagmask == 0 || !alloc);
	KASSERT(path == NULL || !alloc);
	vpp = &t->t_root;
	if (path != NULL) {
		refs = path->p_refs;
		refs->pptr = vpp;
	}
	n = NULL;
	for (shift = 64 - RADIX_TREE_BITS_PER_HEIGHT; shift >= 0;) {
		struct radix_tree_node *c;
		void *entry;
		const uint64_t i = (idx >> shift) & mask;

		if (shift >= hshift) {
			unsigned int newheight;

			KASSERT(vpp == &t->t_root);
			if (i == 0) {
				shift -= RADIX_TREE_BITS_PER_HEIGHT;
				continue;
			}
			if (!alloc) {
				if (path != NULL) {
					KASSERT((refs - path->p_refs) == 0);
					path->p_lastidx =
					    RADIX_TREE_INVALID_HEIGHT;
				}
				return NULL;
			}
			newheight = shift / RADIX_TREE_BITS_PER_HEIGHT + 1;
			if (radix_tree_grow(t, newheight)) {
				return NULL;
			}
			hshift = RADIX_TREE_BITS_PER_HEIGHT * t->t_height;
		}
		entry = *vpp;
		c = entry_ptr(entry);
		if (c == NULL ||
		    (tagmask != 0 &&
		    (entry_tagmask(entry) & tagmask) == 0)) {
			if (!alloc) {
				if (path != NULL) {
					path->p_lastidx = refs - path->p_refs;
				}
				return NULL;
			}
			c = radix_tree_alloc_node();
			if (c == NULL) {
				return NULL;
			}
			*vpp = c;
			if (n != NULL) {
				KASSERT(n->n_nptrs < RADIX_TREE_PTR_PER_NODE);
				n->n_nptrs++;
			}
		}
		n = c;
		vpp = &n->n_ptrs[i];
		if (path != NULL) {
			refs++;
			refs->pptr = vpp;
		}
		shift -= RADIX_TREE_BITS_PER_HEIGHT;
	}
	if (alloc) {
		KASSERT(*vpp == NULL);
		if (n != NULL) {
			KASSERT(n->n_nptrs < RADIX_TREE_PTR_PER_NODE);
			n->n_nptrs++;
		}
	}
	if (path != NULL) {
		path->p_lastidx = refs - path->p_refs;
	}
	return vpp;
}

/*
 * radix_tree_insert_node:
 *
 * Insert the node at the given index.
 *
 * It's illegal to insert NULL.  It's illegal to insert a non-aligned pointer.
 *
 * This function returns ENOMEM if necessary memory allocation failed.
 * Otherwise, this function returns 0.
 *
 * Note that inserting a node can involve memory allocation for intermediate
 * nodes.  If _KERNEL, it's done with no-sleep IPL_NONE memory allocation.
 *
 * For the newly inserted node, all tags are cleared.
 */

int
radix_tree_insert_node(struct radix_tree *t, uint64_t idx, void *p)
{
	void **vpp;

	KASSERT(p != NULL);
	KASSERT(entry_tagmask(entry_compose(p, 0)) == 0);
	vpp = radix_tree_lookup_ptr(t, idx, NULL, true, 0);
	if (vpp == NULL) {
		return ENOMEM;
	}
	KASSERT(*vpp == NULL);
	*vpp = p;
	return 0;
}
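
#if defined(RADIX_TREE_EXAMPLE)
/*
 * example_insert_range: an illustrative sketch, not part of this file; the
 * RADIX_TREE_EXAMPLE guard and this function are hypothetical.  It shows the
 * ENOMEM contract of radix_tree_insert_node: on failure, the entries inserted
 * so far are removed again so that the tree is left as it was.  The pointer p
 * must be non-NULL and suitably aligned, as described above.
 */
static int
example_insert_range(struct radix_tree *t, uint64_t base, unsigned int n,
    void *p)
{
	unsigned int i;
	int error;

	for (i = 0; i < n; i++) {
		error = radix_tree_insert_node(t, base + i, p);
		if (error != 0) {
			KASSERT(error == ENOMEM);
			/* roll back the entries inserted so far. */
			while (i-- > 0) {
				(void)radix_tree_remove_node(t, base + i);
			}
			return error;
		}
	}
	return 0;
}
#endif /* defined(RADIX_TREE_EXAMPLE) */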

/*
 * radix_tree_replace_node:
 *
 * Replace a node at the given index with the given node and return the
 * replaced one.
 *
 * It's illegal to try to replace a node which has not been inserted.
 *
 * This function keeps tags intact.
 */

void *
radix_tree_replace_node(struct radix_tree *t, uint64_t idx, void *p)
{
	void **vpp;
	void *oldp;

	KASSERT(p != NULL);
	KASSERT(entry_tagmask(entry_compose(p, 0)) == 0);
	vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0);
	KASSERT(vpp != NULL);
	oldp = *vpp;
	KASSERT(oldp != NULL);
	*vpp = entry_compose(p, entry_tagmask(*vpp));
	return entry_ptr(oldp);
}

/*
 * radix_tree_remove_node:
 *
 * Remove the node at the given index.
 *
 * It's illegal to try to remove a node which has not been inserted.
 */

void *
radix_tree_remove_node(struct radix_tree *t, uint64_t idx)
{
	struct radix_tree_path path;
	void **vpp;
	void *oldp;
	int i;

	vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0);
	KASSERT(vpp != NULL);
	oldp = *vpp;
	KASSERT(oldp != NULL);
	KASSERT(path.p_lastidx == t->t_height);
	KASSERT(vpp == path_pptr(t, &path, path.p_lastidx));
	*vpp = NULL;
	for (i = t->t_height - 1; i >= 0; i--) {
		void *entry;
		struct radix_tree_node ** const pptr =
		    (struct radix_tree_node **)path_pptr(t, &path, i);
		struct radix_tree_node *n;

		KASSERT(pptr != NULL);
		entry = *pptr;
		n = entry_ptr(entry);
		KASSERT(n != NULL);
		KASSERT(n->n_nptrs > 0);
		n->n_nptrs--;
		if (n->n_nptrs > 0) {
			break;
		}
		radix_tree_free_node(n);
		*pptr = NULL;
	}
	/*
	 * fix up height
	 */
	if (i < 0) {
		KASSERT(t->t_root == NULL);
		t->t_height = 0;
	}
	/*
	 * update tags
	 */
	for (; i >= 0; i--) {
		void *entry;
		struct radix_tree_node ** const pptr =
		    (struct radix_tree_node **)path_pptr(t, &path, i);
		struct radix_tree_node *n;
		unsigned int newmask;

		KASSERT(pptr != NULL);
		entry = *pptr;
		n = entry_ptr(entry);
		KASSERT(n != NULL);
		KASSERT(n->n_nptrs > 0);
		newmask = any_children_tagmask(n);
		if (newmask == entry_tagmask(entry)) {
			break;
		}
		*pptr = entry_compose(n, newmask);
	}
	/*
	 * XXX is it worthwhile to try to reduce the height?
	 * if we do that, make radix_tree_grow roll back its change as well.
	 */
	return entry_ptr(oldp);
}

/*
 * radix_tree_lookup_node:
 *
 * Returns the node at the given index.
 * Returns NULL if nothing is found at the given index.
 */

void *
radix_tree_lookup_node(struct radix_tree *t, uint64_t idx)
{
	void **vpp;

	vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0);
	if (vpp == NULL) {
		return NULL;
	}
	return entry_ptr(*vpp);
}

static inline void
gang_lookup_init(struct radix_tree *t, uint64_t idx,
    struct radix_tree_path *path, const unsigned int tagmask)
{
	void **vpp;

	vpp = radix_tree_lookup_ptr(t, idx, path, false, tagmask);
	KASSERT(vpp == NULL ||
	    vpp == path_pptr(t, path, path->p_lastidx));
	KASSERT(&t->t_root == path_pptr(t, path, 0));
	KASSERT(path->p_lastidx == RADIX_TREE_INVALID_HEIGHT ||
	    path->p_lastidx == t->t_height ||
	    !entry_match_p(*path_pptr(t, path, path->p_lastidx), tagmask));
}

/*
 * gang_lookup_scan:
 *
 * a helper routine for radix_tree_gang_lookup_node and its variants.
 */

static inline unsigned int
__attribute__((__always_inline__))
gang_lookup_scan(struct radix_tree *t, struct radix_tree_path *path,
    void **results, const unsigned int maxresults, const unsigned int tagmask,
    const bool reverse, const bool dense)
{

	/*
	 * we keep the path updated only for lastidx-1.
	 * vpp is what path_pptr(t, path, lastidx) would be.
	 */
	void **vpp;
	unsigned int nfound;
	unsigned int lastidx;
	/*
	 * set up scan direction dependent constants so that we can iterate
	 * over n_ptrs as follows.
	 *
	 *	for (i = first; i != guard; i += step)
	 *		visit n->n_ptrs[i];
	 */
	const int step = reverse ? -1 : 1;
	const unsigned int first = reverse ? RADIX_TREE_PTR_PER_NODE - 1 : 0;
	const unsigned int last = reverse ? 0 : RADIX_TREE_PTR_PER_NODE - 1;
	const unsigned int guard = last + step;

	KASSERT(maxresults > 0);
	KASSERT(&t->t_root == path_pptr(t, path, 0));
	lastidx = path->p_lastidx;
	KASSERT(lastidx == RADIX_TREE_INVALID_HEIGHT ||
	    lastidx == t->t_height ||
	    !entry_match_p(*path_pptr(t, path, lastidx), tagmask));
	nfound = 0;
	if (lastidx == RADIX_TREE_INVALID_HEIGHT) {
		/*
		 * the requested idx is beyond the right-most node.
		 */
		if (reverse && !dense) {
			lastidx = 0;
			vpp = path_pptr(t, path, lastidx);
			goto descend;
		}
		return 0;
	}
	vpp = path_pptr(t, path, lastidx);
	while (/*CONSTCOND*/true) {
		struct radix_tree_node *n;
		unsigned int i;

		if (entry_match_p(*vpp, tagmask)) {
			KASSERT(lastidx == t->t_height);
			/*
			 * record the matching non-NULL leaf.
			 */
			results[nfound] = entry_ptr(*vpp);
			nfound++;
			if (nfound == maxresults) {
				return nfound;
			}
		} else if (dense) {
			return nfound;
		}
scan_siblings:
		/*
		 * try to find the next matching non-NULL sibling.
		 */
		if (lastidx == 0) {
			/*
			 * the root has no siblings.
			 * we're done.
			 */
			KASSERT(vpp == &t->t_root);
			break;
		}
		n = path_node(t, path, lastidx - 1);
		if (*vpp != NULL && n->n_nptrs == 1) {
			/*
			 * optimization; if the node has only a single pointer
			 * and we've already visited it, there's no point in
			 * scanning this node any further.
			 */
			goto no_siblings;
		}
		for (i = vpp - n->n_ptrs + step; i != guard; i += step) {
			KASSERT(i < RADIX_TREE_PTR_PER_NODE);
			if (entry_match_p(n->n_ptrs[i], tagmask)) {
				vpp = &n->n_ptrs[i];
				break;
			}
		}
		if (i == guard) {
no_siblings:
			/*
			 * not found.  go to parent.
			 */
			lastidx--;
			vpp = path_pptr(t, path, lastidx);
			goto scan_siblings;
		}
descend:
		/*
		 * following the left-most (or right-most in the case of a
		 * reverse scan) child node, descend until reaching the leaf
		 * or a non-matching entry.
		 */
		while (entry_match_p(*vpp, tagmask) && lastidx < t->t_height) {
			/*
			 * save vpp in the path so that we can come back to
			 * this node after finishing visiting children.
			 */
			path->p_refs[lastidx].pptr = vpp;
			n = entry_ptr(*vpp);
			vpp = &n->n_ptrs[first];
			lastidx++;
		}
	}
	return nfound;
}

/*
 * radix_tree_gang_lookup_node:
 *
 * Scan the tree starting from the given index in ascending order and
 * return the nodes found.
 *
 * results should be an array large enough to hold maxresults pointers.
 * This function returns the number of nodes found, up to maxresults.
 * Returning fewer than maxresults means there are no more nodes in the tree.
 *
 * If dense == true, this function stops scanning when it finds a hole in
 * the indexes, i.e. an index for which radix_tree_lookup_node would return
 * NULL.  If dense == false, this function skips holes and continues scanning
 * until maxresults nodes are found or it reaches the limit of the index
 * range.
 *
 * The result of this function is semantically equivalent to what could be
 * obtained by repeated calls of radix_tree_lookup_node with increasing index,
 * but this function is expected to be computationally cheaper when looking up
 * multiple nodes at once.  It's expected to be much cheaper especially when
 * node indexes are distributed sparsely.
 *
 * Note that this function doesn't return the index values of the found nodes.
 * Thus, in the case of dense == false, if index values are important to
 * the caller, it's the caller's responsibility to check them, typically
 * by examining the returned nodes using some caller-specific knowledge
 * about them.
 * In the case of dense == true, a node returned via results[N] is always for
 * the index (idx + N).
 */

unsigned int
radix_tree_gang_lookup_node(struct radix_tree *t, uint64_t idx,
    void **results, unsigned int maxresults, bool dense)
{
	struct radix_tree_path path;

	gang_lookup_init(t, idx, &path, 0);
	return gang_lookup_scan(t, &path, results, maxresults, 0, false, dense);
}

/*
 * radix_tree_gang_lookup_node_reverse:
 *
 * Same as radix_tree_gang_lookup_node except that this one scans the
 * tree in the reverse order, i.e. descending index values.
 */

unsigned int
radix_tree_gang_lookup_node_reverse(struct radix_tree *t, uint64_t idx,
    void **results, unsigned int maxresults, bool dense)
{
	struct radix_tree_path path;

	gang_lookup_init(t, idx, &path, 0);
	return gang_lookup_scan(t, &path, results, maxresults, 0, true, dense);
}
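
#if defined(RADIX_TREE_EXAMPLE)
/*
 * example_count_nodes: an illustrative sketch, not part of this file; the
 * RADIX_TREE_EXAMPLE guard, this function, and struct example_item are
 * hypothetical.  It shows the typical batched scan pattern built on
 * radix_tree_gang_lookup_node with dense == false.  Because the function
 * doesn't return index values, the sketch assumes each stored pointer is a
 * struct example_item whose ei_idx member records the index it was inserted
 * under, so that the scan can be resumed after the last result of a batch.
 */
struct example_item {
	uint64_t ei_idx;
};

static unsigned int
example_count_nodes(struct radix_tree *t)
{
	void *results[16];
	struct example_item *it;
	uint64_t nextidx;
	unsigned int total;
	unsigned int nfound;

	total = 0;
	nextidx = 0;
	for (;;) {
		nfound = radix_tree_gang_lookup_node(t, nextidx, results,
		    __arraycount(results), false);
		if (nfound == 0) {
			break;
		}
		total += nfound;
		if (nfound < __arraycount(results)) {
			/* a short batch means there are no more nodes. */
			break;
		}
		/* resume right after the last node found in this batch. */
		it = results[nfound - 1];
		nextidx = it->ei_idx + 1;
	}
	return total;
}
#endif /* defined(RADIX_TREE_EXAMPLE) */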

/*
 * radix_tree_gang_lookup_tagged_node:
 *
 * Same as radix_tree_gang_lookup_node except that this one only returns
 * nodes with at least one of the tags in the given tagmask set.
 *
 * It's illegal to call this function with tagmask 0.
 */

unsigned int
radix_tree_gang_lookup_tagged_node(struct radix_tree *t, uint64_t idx,
    void **results, unsigned int maxresults, bool dense, unsigned int tagmask)
{
	struct radix_tree_path path;

	KASSERT(tagmask != 0);
	gang_lookup_init(t, idx, &path, tagmask);
	return gang_lookup_scan(t, &path, results, maxresults, tagmask, false,
	    dense);
}

/*
 * radix_tree_gang_lookup_tagged_node_reverse:
 *
 * Same as radix_tree_gang_lookup_tagged_node except that this one scans the
 * tree in the reverse order, i.e. descending index values.
 */

unsigned int
radix_tree_gang_lookup_tagged_node_reverse(struct radix_tree *t, uint64_t idx,
    void **results, unsigned int maxresults, bool dense, unsigned int tagmask)
{
	struct radix_tree_path path;

	KASSERT(tagmask != 0);
	gang_lookup_init(t, idx, &path, tagmask);
	return gang_lookup_scan(t, &path, results, maxresults, tagmask, true,
	    dense);
}

/*
 * radix_tree_get_tag:
 *
 * Return the subset of the given tagmask which is set on the node at the
 * given index.
 *
 * It's illegal to call this function for a node which has not been inserted.
 */

unsigned int
radix_tree_get_tag(struct radix_tree *t, uint64_t idx, unsigned int tagmask)
{
	/*
	 * the following two implementations should behave the same.
	 * the former was chosen because it seems faster.
	 */
#if 1
	void **vpp;

	vpp = radix_tree_lookup_ptr(t, idx, NULL, false, tagmask);
	if (vpp == NULL) {
		return 0;
	}
	KASSERT(*vpp != NULL);
	return (entry_tagmask(*vpp) & tagmask);
#else
	void **vpp;

	vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0);
	KASSERT(vpp != NULL);
	return (entry_tagmask(*vpp) & tagmask);
#endif
}

/*
 * radix_tree_set_tag:
 *
 * Set the tag for the node at the given index.
 *
 * It's illegal to call this function for a node which has not been inserted.
 * It's illegal to call this function with tagmask 0.
 */

void
radix_tree_set_tag(struct radix_tree *t, uint64_t idx, unsigned int tagmask)
{
	struct radix_tree_path path;
	void **vpp;
	int i;

	KASSERT(tagmask != 0);
	vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0);
	KASSERT(vpp != NULL);
	KASSERT(*vpp != NULL);
	KASSERT(path.p_lastidx == t->t_height);
	KASSERT(vpp == path_pptr(t, &path, path.p_lastidx));
	for (i = t->t_height; i >= 0; i--) {
		void ** const pptr = (void **)path_pptr(t, &path, i);
		void *entry;

		KASSERT(pptr != NULL);
		entry = *pptr;
		if ((entry_tagmask(entry) & tagmask) != 0) {
			break;
		}
		*pptr = (void *)((uintptr_t)entry | tagmask);
	}
}

/*
 * radix_tree_clear_tag:
 *
 * Clear the tag for the node at the given index.
 *
 * It's illegal to call this function for a node which has not been inserted.
 * It's illegal to call this function with tagmask 0.
 */

void
radix_tree_clear_tag(struct radix_tree *t, uint64_t idx, unsigned int tagmask)
{
	struct radix_tree_path path;
	void **vpp;
	int i;

	KASSERT(tagmask != 0);
	vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0);
	KASSERT(vpp != NULL);
	KASSERT(*vpp != NULL);
	KASSERT(path.p_lastidx == t->t_height);
	KASSERT(vpp == path_pptr(t, &path, path.p_lastidx));
	/*
	 * if already cleared, nothing to do
	 */
	if ((entry_tagmask(*vpp) & tagmask) == 0) {
		return;
	}
	/*
	 * clear the tag only if no children have the tag.
	 */
	for (i = t->t_height; i >= 0; i--) {
		void ** const pptr = (void **)path_pptr(t, &path, i);
		void *entry;

		KASSERT(pptr != NULL);
		entry = *pptr;
		KASSERT((entry_tagmask(entry) & tagmask) != 0);
		*pptr = entry_compose(entry_ptr(entry),
		    entry_tagmask(entry) & ~tagmask);
		/*
		 * check if we should proceed to process the next level.
		 */
		if (0 < i) {
			struct radix_tree_node *n = path_node(t, &path, i - 1);

			if ((any_children_tagmask(n) & tagmask) != 0) {
				break;
			}
		}
	}
}
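
#if defined(RADIX_TREE_EXAMPLE)
/*
 * example_mark_dirty: an illustrative sketch, not part of this file; the
 * RADIX_TREE_EXAMPLE guard, this function, and the use of tag id 0 as a
 * "dirty" marker are hypothetical.  It shows how a tag id is turned into a
 * tagmask and how the tag functions fit together.
 */
static void
example_mark_dirty(struct radix_tree *t, uint64_t idx)
{
	const unsigned int dirtymask = 1U << 0;	/* tagmask for tag id 0 */

	/* the node must have been inserted before it can be tagged. */
	KASSERT(radix_tree_lookup_node(t, idx) != NULL);
	radix_tree_set_tag(t, idx, dirtymask);
	KASSERT(radix_tree_get_tag(t, idx, dirtymask) != 0);
	KASSERT(!radix_tree_empty_tagged_tree_p(t, dirtymask));
	radix_tree_clear_tag(t, idx, dirtymask);
	KASSERT(radix_tree_get_tag(t, idx, dirtymask) == 0);
}
#endif /* defined(RADIX_TREE_EXAMPLE) */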

#if defined(UNITTEST)

#include <inttypes.h>
#include <stdio.h>

static void
radix_tree_dump_node(const struct radix_tree *t, void *vp,
    uint64_t offset, unsigned int height)
{
	struct radix_tree_node *n;
	unsigned int i;

	for (i = 0; i < t->t_height - height; i++) {
		printf(" ");
	}
	if (entry_tagmask(vp) == 0) {
		printf("[%" PRIu64 "] %p", offset, entry_ptr(vp));
	} else {
		printf("[%" PRIu64 "] %p (tagmask=0x%x)", offset, entry_ptr(vp),
		    entry_tagmask(vp));
	}
	if (height == 0) {
		printf(" (leaf)\n");
		return;
	}
	n = entry_ptr(vp);
	assert(any_children_tagmask(n) == entry_tagmask(vp));
	printf(" (%u children)\n", n->n_nptrs);
	for (i = 0; i < __arraycount(n->n_ptrs); i++) {
		void *c;

		c = n->n_ptrs[i];
		if (c == NULL) {
			continue;
		}
		radix_tree_dump_node(t, c,
		    offset + i * (UINT64_C(1) <<
		    (RADIX_TREE_BITS_PER_HEIGHT * (height - 1))), height - 1);
	}
}

void radix_tree_dump(const struct radix_tree *);

void
radix_tree_dump(const struct radix_tree *t)
{

	printf("tree %p height=%u\n", t, t->t_height);
	radix_tree_dump_node(t, t->t_root, 0, t->t_height);
}

static void
test1(void)
{
	struct radix_tree s;
	struct radix_tree *t = &s;
	void *results[3];

	radix_tree_init_tree(t);
	radix_tree_dump(t);
	assert(radix_tree_lookup_node(t, 0) == NULL);
	assert(radix_tree_lookup_node(t, 1000) == NULL);
	assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 0);
	assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 0);
	assert(radix_tree_gang_lookup_node(t, 1000, results, 3, false) == 0);
	assert(radix_tree_gang_lookup_node(t, 1000, results, 3, true) == 0);
	assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, false) ==
	    0);
	assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, true) ==
	    0);
	assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, false)
	    == 0);
	assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, true)
	    == 0);
	assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, false, 1)
	    == 0);
	assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, true, 1)
	    == 0);
	assert(radix_tree_gang_lookup_tagged_node(t, 1000, results, 3, false, 1)
	    == 0);
	assert(radix_tree_gang_lookup_tagged_node(t, 1000, results, 3, true, 1)
	    == 0);
	assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
	    false, 1) == 0);
	assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
	    true, 1) == 0);
	assert(radix_tree_gang_lookup_tagged_node_reverse(t, 1000, results, 3,
	    false, 1) == 0);
	assert(radix_tree_gang_lookup_tagged_node_reverse(t, 1000, results, 3,
	    true, 1) == 0);
	assert(radix_tree_empty_tree_p(t));
	assert(radix_tree_empty_tagged_tree_p(t, 1));
	assert(radix_tree_empty_tagged_tree_p(t, 2));
	assert(radix_tree_insert_node(t, 0, (void *)0xdeadbea0) == 0);
	assert(!radix_tree_empty_tree_p(t));
	assert(radix_tree_empty_tagged_tree_p(t, 1));
	assert(radix_tree_empty_tagged_tree_p(t, 2));
	assert(radix_tree_lookup_node(t, 0) == (void *)0xdeadbea0);
	assert(radix_tree_lookup_node(t, 1000) == NULL);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 1);
	assert(results[0] == (void *)0xdeadbea0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 1);
	assert(results[0] == (void *)0xdeadbea0);
	assert(radix_tree_gang_lookup_node(t, 1000, results, 3, false) == 0);
	assert(radix_tree_gang_lookup_node(t, 1000, results, 3, true) == 0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, false) ==
	    1);
	assert(results[0] == (void *)0xdeadbea0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, true) ==
	    1);
	assert(results[0] == (void *)0xdeadbea0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, false)
	    == 1);
	assert(results[0] == (void *)0xdeadbea0);
	assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, true)
	    == 0);
	assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, false, 1)
	    == 0);
	assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, true, 1)
	    == 0);
	assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
	    false, 1) == 0);
	assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
	    true, 1) == 0);
	assert(radix_tree_insert_node(t, 1000, (void *)0xdeadbea0) == 0);
	assert(radix_tree_remove_node(t, 0) == (void *)0xdeadbea0);
	assert(!radix_tree_empty_tree_p(t));
	radix_tree_dump(t);
	assert(radix_tree_lookup_node(t, 0) == NULL);
	assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 1);
	assert(results[0] == (void *)0xdeadbea0);
	assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node(t, 1000, results, 3, false) == 1);
	assert(results[0] == (void *)0xdeadbea0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node(t, 1000, results, 3, true) == 1);
	assert(results[0] == (void *)0xdeadbea0);
	assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, false)
	    == 0);
	assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3, true)
	    == 0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, false)
	    == 1);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3, true)
	    == 1);
	assert(results[0] == (void *)0xdeadbea0);
	assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, false, 1)
	    == 0);
	assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, true, 1)
	    == 0);
	assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
	    false, 1) == 0);
	assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3,
	    true, 1) == 0);
	assert(!radix_tree_get_tag(t, 1000, 1));
	assert(!radix_tree_get_tag(t, 1000, 2));
	assert(radix_tree_get_tag(t, 1000, 2 | 1) == 0);
	assert(radix_tree_empty_tagged_tree_p(t, 1));
	assert(radix_tree_empty_tagged_tree_p(t, 2));
	radix_tree_set_tag(t, 1000, 2);
	assert(!radix_tree_get_tag(t, 1000, 1));
	assert(radix_tree_get_tag(t, 1000, 2));
	assert(radix_tree_get_tag(t, 1000, 2 | 1) == 2);
	assert(radix_tree_empty_tagged_tree_p(t, 1));
	assert(!radix_tree_empty_tagged_tree_p(t, 2));
	radix_tree_dump(t);
	assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
	assert(radix_tree_insert_node(t, 0, (void *)0xbea0) == 0);
	radix_tree_dump(t);
	assert(radix_tree_lookup_node(t, 0) == (void *)0xbea0);
	assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
	assert(radix_tree_insert_node(t, UINT64_C(10000000000), (void *)0xdea0)
	    == 0);
	radix_tree_dump(t);
	assert(radix_tree_lookup_node(t, 0) == (void *)0xbea0);
	assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
	assert(radix_tree_lookup_node(t, UINT64_C(10000000000)) ==
	    (void *)0xdea0);
	radix_tree_dump(t);
	assert(!radix_tree_get_tag(t, 0, 2));
	assert(radix_tree_get_tag(t, 1000, 2));
	assert(!radix_tree_get_tag(t, UINT64_C(10000000000), 1));
	radix_tree_set_tag(t, 0, 2);
	radix_tree_set_tag(t, UINT64_C(10000000000), 2);
	radix_tree_dump(t);
	assert(radix_tree_get_tag(t, 0, 2));
	assert(radix_tree_get_tag(t, 1000, 2));
	assert(radix_tree_get_tag(t, UINT64_C(10000000000), 2));
	radix_tree_clear_tag(t, 0, 2);
	radix_tree_clear_tag(t, UINT64_C(10000000000), 2);
	radix_tree_dump(t);
	assert(!radix_tree_get_tag(t, 0, 2));
	assert(radix_tree_get_tag(t, 1000, 2));
	assert(!radix_tree_get_tag(t, UINT64_C(10000000000), 2));
	radix_tree_dump(t);
	assert(radix_tree_replace_node(t, 1000, (void *)0x12345678) ==
	    (void *)0xdeadbea0);
	assert(!radix_tree_get_tag(t, 1000, 1));
	assert(radix_tree_get_tag(t, 1000, 2));
	assert(radix_tree_get_tag(t, 1000, 2 | 1) == 2);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node(t, 0, results, 3, false) == 3);
	assert(results[0] == (void *)0xbea0);
	assert(results[1] == (void *)0x12345678);
	assert(results[2] == (void *)0xdea0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node(t, 0, results, 3, true) == 1);
	assert(results[0] == (void *)0xbea0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node(t, 1, results, 3, false) == 2);
	assert(results[0] == (void *)0x12345678);
	assert(results[1] == (void *)0xdea0);
	assert(radix_tree_gang_lookup_node(t, 1, results, 3, true) == 0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node(t, 1001, results, 3, false) == 1);
	assert(results[0] == (void *)0xdea0);
	assert(radix_tree_gang_lookup_node(t, 1001, results, 3, true) == 0);
	assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000001), results, 3,
	    false) == 0);
	assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000001), results, 3,
	    true) == 0);
	assert(radix_tree_gang_lookup_node(t, UINT64_C(1000000000000), results,
	    3, false) == 0);
	assert(radix_tree_gang_lookup_node(t, UINT64_C(1000000000000), results,
	    3, true) == 0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 100, false, 2)
	    == 1);
	assert(results[0] == (void *)0x12345678);
	assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 100, true, 2)
	    == 0);
	assert(entry_tagmask(t->t_root) != 0);
	assert(radix_tree_remove_node(t, 1000) == (void *)0x12345678);
	assert(entry_tagmask(t->t_root) == 0);
	radix_tree_dump(t);
	assert(radix_tree_insert_node(t, UINT64_C(10000000001), (void *)0xfff0)
	    == 0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000000), results, 3,
	    false) == 2);
	assert(results[0] == (void *)0xdea0);
	assert(results[1] == (void *)0xfff0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000000), results, 3,
	    true) == 2);
	assert(results[0] == (void *)0xdea0);
	assert(results[1] == (void *)0xfff0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node_reverse(t, UINT64_C(10000000001),
	    results, 3, false) == 3);
	assert(results[0] == (void *)0xfff0);
	assert(results[1] == (void *)0xdea0);
	assert(results[2] == (void *)0xbea0);
	memset(results, 0, sizeof(results));
	assert(radix_tree_gang_lookup_node_reverse(t, UINT64_C(10000000001),
	    results, 3, true) == 2);
	assert(results[0] == (void *)0xfff0);
	assert(results[1] == (void *)0xdea0);
	assert(radix_tree_remove_node(t, UINT64_C(10000000000)) ==
	    (void *)0xdea0);
	assert(radix_tree_remove_node(t, UINT64_C(10000000001)) ==
	    (void *)0xfff0);
	radix_tree_dump(t);
	assert(radix_tree_remove_node(t, 0) == (void *)0xbea0);
	radix_tree_dump(t);
	radix_tree_fini_tree(t);
}
1307 1.1 yamt
1308 1.1 yamt #include <sys/time.h>
1309 1.1 yamt
1310 1.1 yamt struct testnode {
1311 1.1 yamt uint64_t idx;
1312 1.12 yamt bool tagged[RADIX_TREE_TAG_ID_MAX];
1313 1.1 yamt };
1314 1.1 yamt
1315 1.1 yamt static void
1316 1.11 yamt printops(const char *title, const char *name, int tag, unsigned int n,
1317 1.11 yamt const struct timeval *stv, const struct timeval *etv)
1318 1.1 yamt {
1319 1.1 yamt uint64_t s = stv->tv_sec * 1000000 + stv->tv_usec;
1320 1.1 yamt uint64_t e = etv->tv_sec * 1000000 + etv->tv_usec;
1321 1.1 yamt
1322 1.11 yamt printf("RESULT %s %s %d %lf op/s\n", title, name, tag,
1323 1.11 yamt (double)n / (e - s) * 1000000);
1324 1.1 yamt }
1325 1.1 yamt
1326 1.1 yamt #define TEST2_GANG_LOOKUP_NODES 16
1327 1.1 yamt
1328 1.1 yamt static bool
1329 1.17.2.4 yamt test2_should_tag(unsigned int i, unsigned int tagid)
1330 1.1 yamt {
1331 1.1 yamt
1332 1.1 yamt if (tagid == 0) {
1333 1.17.2.4 yamt return (i % 4) == 0; /* 25% */
1334 1.1 yamt } else {
1335 1.11 yamt return (i % 7) == 0; /* 14% */
1336 1.1 yamt }
1337 1.17.2.4 yamt return 1;
1338 1.17.2.4 yamt }
1339 1.17.2.4 yamt
1340 1.17.2.4 yamt static void
1341 1.17.2.4 yamt check_tag_count(const unsigned int *ntagged, unsigned int tagmask,
1342 1.17.2.4 yamt unsigned int count)
1343 1.17.2.4 yamt {
1344 1.17.2.4 yamt unsigned int tag;
1345 1.17.2.4 yamt
1346 1.17.2.4 yamt for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
1347 1.17.2.4 yamt if ((tagmask & (1 << tag)) == 0) {
1348 1.17.2.4 yamt continue;
1349 1.17.2.4 yamt }
1350 1.17.2.4 yamt if (((tagmask - 1) & tagmask) == 0) {
1351 1.17.2.4 yamt assert(count == ntagged[tag]);
1352 1.17.2.4 yamt } else {
1353 1.17.2.4 yamt assert(count >= ntagged[tag]);
1354 1.17.2.4 yamt }
1355 1.17.2.4 yamt }
1356 1.1 yamt }
1357 1.1 yamt
1358 1.1 yamt static void
1359 1.11 yamt test2(const char *title, bool dense)
1360 1.1 yamt {
1361 1.1 yamt struct radix_tree s;
1362 1.1 yamt struct radix_tree *t = &s;
1363 1.1 yamt struct testnode *n;
1364 1.1 yamt unsigned int i;
1365 1.1 yamt unsigned int nnodes = 100000;
1366 1.1 yamt unsigned int removed;
1367 1.17.2.4 yamt unsigned int tag;
1368 1.17.2.4 yamt unsigned int tagmask;
1369 1.1 yamt unsigned int ntagged[RADIX_TREE_TAG_ID_MAX];
1370 1.1 yamt struct testnode *nodes;
1371 1.1 yamt struct timeval stv;
1372 1.1 yamt struct timeval etv;
1373 1.1 yamt
1374 1.1 yamt nodes = malloc(nnodes * sizeof(*nodes));
1375 1.1 yamt for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
1376 1.1 yamt ntagged[tag] = 0;
1377 1.1 yamt }
1378 1.1 yamt radix_tree_init_tree(t);
1379 1.1 yamt for (i = 0; i < nnodes; i++) {
1380 1.1 yamt n = &nodes[i];
1381 1.1 yamt n->idx = random();
1382 1.1 yamt if (sizeof(long) == 4) {
1383 1.1 yamt n->idx <<= 32;
1384 1.1 yamt n->idx |= (uint32_t)random();
1385 1.1 yamt }
1386 1.1 yamt if (dense) {
1387 1.1 yamt n->idx %= nnodes * 2;
1388 1.1 yamt }
1389 1.1 yamt while (radix_tree_lookup_node(t, n->idx) != NULL) {
1390 1.1 yamt n->idx++;
1391 1.1 yamt }
1392 1.1 yamt radix_tree_insert_node(t, n->idx, n);
1393 1.1 yamt for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
1394 1.17.2.4 yamt tagmask = 1 << tag;
1395 1.17.2.4 yamt
1396 1.12 yamt n->tagged[tag] = test2_should_tag(i, tag);
1397 1.12 yamt if (n->tagged[tag]) {
1398 1.17.2.4 yamt radix_tree_set_tag(t, n->idx, tagmask);
1399 1.1 yamt ntagged[tag]++;
1400 1.1 yamt }
1401 1.17.2.4 yamt assert((n->tagged[tag] ? tagmask : 0) ==
1402 1.17.2.4 yamt radix_tree_get_tag(t, n->idx, tagmask));
1403 1.1 yamt }
1404 1.1 yamt }
1405 1.1 yamt
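	/* Time a plain lookup of every inserted index. */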
1406 1.1 yamt gettimeofday(&stv, NULL);
1407 1.1 yamt for (i = 0; i < nnodes; i++) {
1408 1.1 yamt n = &nodes[i];
1409 1.1 yamt assert(radix_tree_lookup_node(t, n->idx) == n);
1410 1.1 yamt }
1411 1.1 yamt gettimeofday(&etv, NULL);
1412 1.11 yamt printops(title, "lookup", 0, nnodes, &stv, &etv);
1413 1.1 yamt
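	/*
	 * For every possible tag mask, time radix_tree_get_tag() over all
	 * nodes and cross-check the returned bits against the per-node
	 * bookkeeping.
	 */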
1414 1.17.2.4 yamt for (tagmask = 1; tagmask <= RADIX_TREE_TAG_MASK; tagmask ++) {
1415 1.12 yamt unsigned int count = 0;
1416 1.12 yamt
1417 1.1 yamt gettimeofday(&stv, NULL);
1418 1.1 yamt for (i = 0; i < nnodes; i++) {
1419 1.17.2.4 yamt unsigned int tagged;
1420 1.12 yamt
1421 1.1 yamt n = &nodes[i];
1422 1.17.2.4 yamt tagged = radix_tree_get_tag(t, n->idx, tagmask);
1423 1.17.2.4 yamt assert((tagged & ~tagmask) == 0);
1424 1.17.2.4 yamt for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
1425 1.17.2.4 yamt assert((tagmask & (1 << tag)) == 0 ||
1426 1.17.2.4 yamt n->tagged[tag] == !!(tagged & (1 << tag)));
1427 1.17.2.4 yamt }
1428 1.12 yamt if (tagged) {
1429 1.12 yamt count++;
1430 1.12 yamt }
1431 1.1 yamt }
1432 1.1 yamt gettimeofday(&etv, NULL);
1433 1.17.2.4 yamt check_tag_count(ntagged, tagmask, count);
1434 1.17.2.4 yamt printops(title, "get_tag", tagmask, nnodes, &stv, &etv);
1435 1.1 yamt }
1436 1.1 yamt
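	/* Time removal of every node. */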
1437 1.1 yamt gettimeofday(&stv, NULL);
1438 1.1 yamt for (i = 0; i < nnodes; i++) {
1439 1.1 yamt n = &nodes[i];
1440 1.1 yamt radix_tree_remove_node(t, n->idx);
1441 1.1 yamt }
1442 1.1 yamt gettimeofday(&etv, NULL);
1443 1.11 yamt printops(title, "remove", 0, nnodes, &stv, &etv);
1444 1.1 yamt
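	/* Time re-insertion of the same nodes. */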
1445 1.1 yamt gettimeofday(&stv, NULL);
1446 1.1 yamt for (i = 0; i < nnodes; i++) {
1447 1.1 yamt n = &nodes[i];
1448 1.1 yamt radix_tree_insert_node(t, n->idx, n);
1449 1.1 yamt }
1450 1.1 yamt gettimeofday(&etv, NULL);
1451 1.11 yamt printops(title, "insert", 0, nnodes, &stv, &etv);
1452 1.1 yamt
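	/*
	 * Re-apply the tags recorded in each node, rebuilding ntagged[], and
	 * time radix_tree_set_tag() per tag.
	 */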
1453 1.1 yamt for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
1454 1.17.2.4 yamt tagmask = 1 << tag;
1455 1.17.2.4 yamt
1456 1.1 yamt ntagged[tag] = 0;
1457 1.1 yamt gettimeofday(&stv, NULL);
1458 1.1 yamt for (i = 0; i < nnodes; i++) {
1459 1.1 yamt n = &nodes[i];
1460 1.12 yamt if (n->tagged[tag]) {
1461 1.17.2.4 yamt radix_tree_set_tag(t, n->idx, tagmask);
1462 1.1 yamt ntagged[tag]++;
1463 1.1 yamt }
1464 1.1 yamt }
1465 1.1 yamt gettimeofday(&etv, NULL);
1466 1.11 yamt printops(title, "set_tag", tag, ntagged[tag], &stv, &etv);
1467 1.1 yamt }
1468 1.1 yamt
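	/*
	 * Walk the whole tree forward with radix_tree_gang_lookup_node() in
	 * batches of TEST2_GANG_LOOKUP_NODES; every node must be seen exactly
	 * once.
	 */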
1469 1.1 yamt gettimeofday(&stv, NULL);
1470 1.1 yamt {
1471 1.1 yamt struct testnode *results[TEST2_GANG_LOOKUP_NODES];
1472 1.1 yamt uint64_t nextidx;
1473 1.1 yamt unsigned int nfound;
1474 1.1 yamt unsigned int total;
1475 1.1 yamt
1476 1.1 yamt nextidx = 0;
1477 1.1 yamt total = 0;
1478 1.1 yamt while ((nfound = radix_tree_gang_lookup_node(t, nextidx,
1479 1.17.2.1 yamt (void *)results, __arraycount(results), false)) > 0) {
1480 1.1 yamt nextidx = results[nfound - 1]->idx + 1;
1481 1.1 yamt total += nfound;
1482 1.15 yamt if (nextidx == 0) {
1483 1.15 yamt break;
1484 1.15 yamt }
1485 1.1 yamt }
1486 1.1 yamt assert(total == nnodes);
1487 1.1 yamt }
1488 1.1 yamt gettimeofday(&etv, NULL);
1489 1.11 yamt printops(title, "ganglookup", 0, nnodes, &stv, &etv);
1490 1.1 yamt
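	/* The same walk in reverse, starting from UINT64_MAX. */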
1491 1.15 yamt gettimeofday(&stv, NULL);
1492 1.15 yamt {
1493 1.15 yamt struct testnode *results[TEST2_GANG_LOOKUP_NODES];
1494 1.15 yamt uint64_t nextidx;
1495 1.15 yamt unsigned int nfound;
1496 1.15 yamt unsigned int total;
1497 1.15 yamt
1498 1.15 yamt nextidx = UINT64_MAX;
1499 1.15 yamt total = 0;
1500 1.15 yamt while ((nfound = radix_tree_gang_lookup_node_reverse(t, nextidx,
1501 1.17.2.1 yamt (void *)results, __arraycount(results), false)) > 0) {
1502 1.15 yamt nextidx = results[nfound - 1]->idx - 1;
1503 1.15 yamt total += nfound;
1504 1.15 yamt if (nextidx == UINT64_MAX) {
1505 1.15 yamt break;
1506 1.15 yamt }
1507 1.15 yamt }
1508 1.15 yamt assert(total == nnodes);
1509 1.15 yamt }
1510 1.15 yamt gettimeofday(&etv, NULL);
1511 1.15 yamt printops(title, "ganglookup_reverse", 0, nnodes, &stv, &etv);
1512 1.15 yamt
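	/*
	 * Tagged gang lookup, forward, once per tag mask; the number of nodes
	 * found is checked against the bookkeeping.
	 */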
1513 1.17.2.4 yamt for (tagmask = 1; tagmask <= RADIX_TREE_TAG_MASK; tagmask ++) {
1514 1.17.2.4 yamt unsigned int total = 0;
1515 1.17.2.4 yamt
1516 1.1 yamt gettimeofday(&stv, NULL);
1517 1.1 yamt {
1518 1.1 yamt struct testnode *results[TEST2_GANG_LOOKUP_NODES];
1519 1.1 yamt uint64_t nextidx;
1520 1.1 yamt unsigned int nfound;
1521 1.1 yamt
1522 1.1 yamt nextidx = 0;
1523 1.1 yamt while ((nfound = radix_tree_gang_lookup_tagged_node(t,
1524 1.1 yamt nextidx, (void *)results, __arraycount(results),
1525 1.17.2.4 yamt false, tagmask)) > 0) {
1526 1.1 yamt nextidx = results[nfound - 1]->idx + 1;
1527 1.1 yamt total += nfound;
1528 1.1 yamt }
1529 1.1 yamt }
1530 1.1 yamt gettimeofday(&etv, NULL);
1531 1.17.2.4 yamt check_tag_count(ntagged, tagmask, total);
1532 1.17.2.4 yamt assert(tagmask != 0 || total == 0);
1533 1.17.2.4 yamt printops(title, "ganglookup_tag", tagmask, total, &stv, &etv);
1534 1.1 yamt }
1535 1.1 yamt
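	/* Tagged gang lookup in reverse, once per tag mask. */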
1536 1.17.2.4 yamt for (tagmask = 1; tagmask <= RADIX_TREE_TAG_MASK; tagmask ++) {
1537 1.17.2.4 yamt unsigned int total = 0;
1538 1.17.2.4 yamt
1539 1.15 yamt gettimeofday(&stv, NULL);
1540 1.15 yamt {
1541 1.15 yamt struct testnode *results[TEST2_GANG_LOOKUP_NODES];
1542 1.15 yamt uint64_t nextidx;
1543 1.15 yamt unsigned int nfound;
1544 1.15 yamt
1545 1.15 yamt nextidx = UINT64_MAX;
1546 1.15 yamt while ((nfound =
1547 1.15 yamt radix_tree_gang_lookup_tagged_node_reverse(t,
1548 1.15 yamt nextidx, (void *)results, __arraycount(results),
1549 1.17.2.4 yamt false, tagmask)) > 0) {
1550 1.15 yamt nextidx = results[nfound - 1]->idx - 1;
1551 1.15 yamt total += nfound;
1552 1.15 yamt if (nextidx == UINT64_MAX) {
1553 1.15 yamt break;
1554 1.15 yamt }
1555 1.15 yamt }
1556 1.15 yamt }
1557 1.15 yamt gettimeofday(&etv, NULL);
1558 1.17.2.4 yamt check_tag_count(ntagged, tagmask, total);
1559 1.17.2.4 yamt assert(tagmask != 0 || total == 0);
1560 1.17.2.4 yamt printops(title, "ganglookup_tag_reverse", tagmask, total,
1561 1.15 yamt &stv, &etv);
1562 1.15 yamt }
1563 1.15 yamt
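	/*
	 * Remove the nodes found by a tagged gang lookup, one tag at a time.
	 * The pass for tag 0 must find exactly ntagged[0] nodes; later passes
	 * may find fewer, because nodes carrying several tags have already
	 * been removed.
	 */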
1564 1.1 yamt removed = 0;
1565 1.1 yamt for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
1566 1.1 yamt unsigned int total;
1567 1.1 yamt
1568 1.1 yamt total = 0;
1569 1.17.2.4 yamt tagmask = 1 << tag;
1570 1.1 yamt gettimeofday(&stv, NULL);
1571 1.1 yamt {
1572 1.1 yamt struct testnode *results[TEST2_GANG_LOOKUP_NODES];
1573 1.1 yamt uint64_t nextidx;
1574 1.1 yamt unsigned int nfound;
1575 1.1 yamt
1576 1.1 yamt nextidx = 0;
1577 1.1 yamt while ((nfound = radix_tree_gang_lookup_tagged_node(t,
1578 1.1 yamt nextidx, (void *)results, __arraycount(results),
1579 1.17.2.4 yamt false, tagmask)) > 0) {
1580 1.1 yamt for (i = 0; i < nfound; i++) {
1581 1.1 yamt radix_tree_remove_node(t,
1582 1.1 yamt results[i]->idx);
1583 1.1 yamt }
1584 1.1 yamt nextidx = results[nfound - 1]->idx + 1;
1585 1.1 yamt total += nfound;
1586 1.15 yamt if (nextidx == 0) {
1587 1.15 yamt break;
1588 1.15 yamt }
1589 1.1 yamt }
1590 1.1 yamt }
1591 1.1 yamt gettimeofday(&etv, NULL);
1592 1.17.2.4 yamt if (tag == 0) {
1593 1.17.2.4 yamt check_tag_count(ntagged, tagmask, total);
1594 1.17.2.4 yamt } else {
1595 1.17.2.4 yamt assert(total <= ntagged[tag]);
1596 1.17.2.4 yamt }
1597 1.17.2.4 yamt printops(title, "ganglookup_tag+remove", tagmask, total, &stv,
1598 1.11 yamt &etv);
1599 1.1 yamt removed += total;
1600 1.1 yamt }
1601 1.1 yamt
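	/*
	 * Remove whatever is left with a plain forward gang lookup; the total
	 * must account for every node not removed by the tag passes above.
	 */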
1602 1.1 yamt gettimeofday(&stv, NULL);
1603 1.1 yamt {
1604 1.1 yamt struct testnode *results[TEST2_GANG_LOOKUP_NODES];
1605 1.1 yamt uint64_t nextidx;
1606 1.1 yamt unsigned int nfound;
1607 1.1 yamt unsigned int total;
1608 1.1 yamt
1609 1.1 yamt nextidx = 0;
1610 1.1 yamt total = 0;
1611 1.1 yamt while ((nfound = radix_tree_gang_lookup_node(t, nextidx,
1612 1.17.2.1 yamt (void *)results, __arraycount(results), false)) > 0) {
1613 1.1 yamt for (i = 0; i < nfound; i++) {
1614 1.1 yamt assert(results[i] == radix_tree_remove_node(t,
1615 1.1 yamt results[i]->idx));
1616 1.1 yamt }
1617 1.1 yamt nextidx = results[nfound - 1]->idx + 1;
1618 1.1 yamt total += nfound;
1619 1.15 yamt if (nextidx == 0) {
1620 1.15 yamt break;
1621 1.15 yamt }
1622 1.1 yamt }
1623 1.1 yamt assert(total == nnodes - removed);
1624 1.1 yamt }
1625 1.1 yamt gettimeofday(&etv, NULL);
1626 1.11 yamt printops(title, "ganglookup+remove", 0, nnodes - removed, &stv, &etv);
1627 1.1 yamt
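	/* The tree must now be empty, with no tags left behind. */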
1628 1.16 yamt assert(radix_tree_empty_tree_p(t));
1629 1.17.2.4 yamt for (tagmask = 1; tagmask <= RADIX_TREE_TAG_MASK; tagmask ++) {
1630 1.17.2.4 yamt assert(radix_tree_empty_tagged_tree_p(t, tagmask));
1631 1.17.2.4 yamt }
1632 1.1 yamt radix_tree_fini_tree(t);
1633 1.1 yamt free(nodes);
1634 1.1 yamt }
1635 1.1 yamt
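/*
 * Run the functional test, then the benchmark twice: once with a dense and
 * once with a sparse index distribution.
 */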
1636 1.1 yamt int
1637 1.1 yamt main(int argc, char *argv[])
1638 1.1 yamt {
1639 1.1 yamt
1640 1.1 yamt test1();
1641 1.11 yamt test2("dense", true);
1642 1.11 yamt test2("sparse", false);
1643 1.1 yamt return 0;
1644 1.1 yamt }
1645 1.1 yamt
1646 1.1 yamt #endif /* defined(UNITTEST) */