radixtree.c revision 1.15 1 1.15 yamt /* $NetBSD: radixtree.c,v 1.15 2011/10/14 19:42:15 yamt Exp $ */
2 1.1 yamt
3 1.1 yamt /*-
4 1.1 yamt * Copyright (c)2011 YAMAMOTO Takashi,
5 1.1 yamt * All rights reserved.
6 1.1 yamt *
7 1.1 yamt * Redistribution and use in source and binary forms, with or without
8 1.1 yamt * modification, are permitted provided that the following conditions
9 1.1 yamt * are met:
10 1.1 yamt * 1. Redistributions of source code must retain the above copyright
11 1.1 yamt * notice, this list of conditions and the following disclaimer.
12 1.1 yamt * 2. Redistributions in binary form must reproduce the above copyright
13 1.1 yamt * notice, this list of conditions and the following disclaimer in the
14 1.1 yamt * documentation and/or other materials provided with the distribution.
15 1.1 yamt *
16 1.1 yamt * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
17 1.1 yamt * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18 1.1 yamt * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19 1.1 yamt * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
20 1.1 yamt * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21 1.1 yamt * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
22 1.1 yamt * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
23 1.1 yamt * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
24 1.1 yamt * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
25 1.1 yamt * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26 1.1 yamt * SUCH DAMAGE.
27 1.1 yamt */
28 1.1 yamt
29 1.1 yamt /*
30 1.1 yamt * radix tree
31 1.1 yamt *
32 1.1 yamt * it's designed to work efficiently with dense index distribution.
33 1.1 yamt * the memory consumption (number of necessary intermediate nodes)
34 1.1 yamt * heavily depends on index distribution. basically, more dense index
35 1.1 yamt * distribution consumes less nodes per item.
36 1.1 yamt * approximately,
37 1.1 yamt * the best case: about RADIX_TREE_PTR_PER_NODE items per node.
38 1.1 yamt * the worst case: RADIX_TREE_MAX_HEIGHT nodes per item.
39 1.1 yamt */
40 1.1 yamt
41 1.1 yamt #include <sys/cdefs.h>
42 1.1 yamt
43 1.2 yamt #if defined(_KERNEL) || defined(_STANDALONE)
44 1.15 yamt __KERNEL_RCSID(0, "$NetBSD: radixtree.c,v 1.15 2011/10/14 19:42:15 yamt Exp $");
45 1.1 yamt #include <sys/param.h>
46 1.3 yamt #include <sys/errno.h>
47 1.1 yamt #include <sys/pool.h>
48 1.1 yamt #include <sys/radixtree.h>
49 1.3 yamt #include <lib/libkern/libkern.h>
50 1.3 yamt #if defined(_STANDALONE)
51 1.3 yamt #include <lib/libsa/stand.h>
52 1.3 yamt #endif /* defined(_STANDALONE) */
53 1.2 yamt #else /* defined(_KERNEL) || defined(_STANDALONE) */
54 1.15 yamt __RCSID("$NetBSD: radixtree.c,v 1.15 2011/10/14 19:42:15 yamt Exp $");
55 1.1 yamt #include <assert.h>
56 1.1 yamt #include <errno.h>
57 1.1 yamt #include <stdbool.h>
58 1.1 yamt #include <stdlib.h>
59 1.8 yamt #include <string.h>
60 1.1 yamt #if 1
61 1.1 yamt #define KASSERT assert
62 1.1 yamt #else
63 1.1 yamt #define KASSERT(a) /* nothing */
64 1.1 yamt #endif
65 1.2 yamt #endif /* defined(_KERNEL) || defined(_STANDALONE) */
66 1.1 yamt
67 1.1 yamt #include <sys/radixtree.h>
68 1.1 yamt
69 1.1 yamt #define RADIX_TREE_BITS_PER_HEIGHT 4 /* XXX tune */
70 1.1 yamt #define RADIX_TREE_PTR_PER_NODE (1 << RADIX_TREE_BITS_PER_HEIGHT)
71 1.1 yamt #define RADIX_TREE_MAX_HEIGHT (64 / RADIX_TREE_BITS_PER_HEIGHT)
72 1.15 yamt #define RADIX_TREE_INVALID_HEIGHT (RADIX_TREE_MAX_HEIGHT + 1)
73 1.2 yamt __CTASSERT((64 % RADIX_TREE_BITS_PER_HEIGHT) == 0);
74 1.1 yamt
75 1.2 yamt __CTASSERT(((1 << RADIX_TREE_TAG_ID_MAX) & (sizeof(int) - 1)) == 0);
76 1.1 yamt #define RADIX_TREE_TAG_MASK ((1 << RADIX_TREE_TAG_ID_MAX) - 1)
77 1.1 yamt
78 1.1 yamt static inline void *
79 1.1 yamt entry_ptr(void *p)
80 1.1 yamt {
81 1.1 yamt
82 1.1 yamt return (void *)((uintptr_t)p & ~RADIX_TREE_TAG_MASK);
83 1.1 yamt }
84 1.1 yamt
85 1.1 yamt static inline unsigned int
86 1.1 yamt entry_tagmask(void *p)
87 1.1 yamt {
88 1.1 yamt
89 1.1 yamt return (uintptr_t)p & RADIX_TREE_TAG_MASK;
90 1.1 yamt }
91 1.1 yamt
/*
 * entry_compose:
 *
 * combine a bare pointer and tag bits into an entry.
 */

static inline void *
entry_compose(void *p, unsigned int tagmask)
{
	const uintptr_t v = (uintptr_t)p | tagmask;

	return (void *)v;
}
98 1.1 yamt
99 1.1 yamt static inline bool
100 1.1 yamt entry_match_p(void *p, unsigned int tagmask)
101 1.1 yamt {
102 1.1 yamt
103 1.1 yamt KASSERT(entry_ptr(p) != NULL || entry_tagmask(p) == 0);
104 1.1 yamt if (p == NULL) {
105 1.1 yamt return false;
106 1.1 yamt }
107 1.1 yamt if (tagmask == 0) {
108 1.1 yamt return true;
109 1.1 yamt }
110 1.1 yamt return (entry_tagmask(p) & tagmask) != 0;
111 1.1 yamt }
112 1.1 yamt
113 1.1 yamt static inline unsigned int
114 1.1 yamt tagid_to_mask(radix_tree_tagid_t id)
115 1.1 yamt {
116 1.1 yamt
117 1.6 yamt KASSERT(id >= 0);
118 1.6 yamt KASSERT(id < RADIX_TREE_TAG_ID_MAX);
119 1.1 yamt return 1U << id;
120 1.1 yamt }
121 1.1 yamt
122 1.1 yamt /*
123 1.1 yamt * radix_tree_node: an intermediate node
124 1.1 yamt *
125 1.1 yamt * we don't care the type of leaf nodes. they are just void *.
126 1.1 yamt */
127 1.1 yamt
struct radix_tree_node {
	/*
	 * n_ptrs holds the children of this node.  each element is either
	 * a tagged pointer to a lower-level radix_tree_node or, at the
	 * lowest level, a tagged leaf pointer.
	 */
	void *n_ptrs[RADIX_TREE_PTR_PER_NODE];
	unsigned int n_nptrs;	/* # of non-NULL pointers in n_ptrs */
};
132 1.1 yamt
133 1.7 yamt /*
134 1.7 yamt * any_children_tagmask:
135 1.7 yamt *
136 1.7 yamt * return OR'ed tagmask of the given node's children.
137 1.7 yamt */
138 1.7 yamt
139 1.1 yamt static unsigned int
140 1.13 yamt any_children_tagmask(const struct radix_tree_node *n)
141 1.1 yamt {
142 1.1 yamt unsigned int mask;
143 1.1 yamt int i;
144 1.1 yamt
145 1.1 yamt mask = 0;
146 1.1 yamt for (i = 0; i < RADIX_TREE_PTR_PER_NODE; i++) {
147 1.1 yamt mask |= (unsigned int)(uintptr_t)n->n_ptrs[i];
148 1.1 yamt }
149 1.1 yamt return mask & RADIX_TREE_TAG_MASK;
150 1.1 yamt }
151 1.1 yamt
152 1.1 yamt /*
153 1.1 yamt * p_refs[0].pptr == &t->t_root
154 1.1 yamt * :
155 1.1 yamt * p_refs[n].pptr == &(*p_refs[n-1])->n_ptrs[x]
156 1.1 yamt * :
157 1.1 yamt * :
158 1.1 yamt * p_refs[t->t_height].pptr == &leaf_pointer
159 1.1 yamt */
160 1.1 yamt
struct radix_tree_path {
	struct radix_tree_node_ref {
		void **pptr;	/* address of the slot visited at this level */
	} p_refs[RADIX_TREE_MAX_HEIGHT + 1]; /* +1 for the root ptr */
	/*
	 * p_lastidx is either the index of the last valid element of p_refs[]
	 * or RADIX_TREE_INVALID_HEIGHT.
	 * RADIX_TREE_INVALID_HEIGHT means that radix_tree_lookup_ptr found
	 * that the height of the tree is not enough to cover the given index.
	 */
	unsigned int p_lastidx;
};
173 1.1 yamt
174 1.1 yamt static inline void **
175 1.13 yamt path_pptr(const struct radix_tree *t, const struct radix_tree_path *p,
176 1.1 yamt unsigned int height)
177 1.1 yamt {
178 1.1 yamt
179 1.1 yamt KASSERT(height <= t->t_height);
180 1.1 yamt return p->p_refs[height].pptr;
181 1.1 yamt }
182 1.1 yamt
183 1.1 yamt static inline struct radix_tree_node *
184 1.13 yamt path_node(const struct radix_tree * t, const struct radix_tree_path *p,
185 1.13 yamt unsigned int height)
186 1.1 yamt {
187 1.1 yamt
188 1.1 yamt KASSERT(height <= t->t_height);
189 1.1 yamt return entry_ptr(*path_pptr(t, p, height));
190 1.1 yamt }
191 1.1 yamt
192 1.1 yamt /*
193 1.1 yamt * radix_tree_init_tree:
194 1.1 yamt *
195 1.1 yamt * initialize a tree.
196 1.1 yamt */
197 1.1 yamt
198 1.1 yamt void
199 1.1 yamt radix_tree_init_tree(struct radix_tree *t)
200 1.1 yamt {
201 1.1 yamt
202 1.1 yamt t->t_height = 0;
203 1.1 yamt t->t_root = NULL;
204 1.1 yamt }
205 1.1 yamt
/*
 * radix_tree_fini_tree:
 *
 * clean up a tree.
 */
211 1.1 yamt
212 1.1 yamt void
213 1.1 yamt radix_tree_fini_tree(struct radix_tree *t)
214 1.1 yamt {
215 1.1 yamt
216 1.1 yamt KASSERT(t->t_root == NULL);
217 1.1 yamt KASSERT(t->t_height == 0);
218 1.1 yamt }
219 1.1 yamt
/*
 * radix_tree_empty_tree_p:
 *
 * return true if the tree contains no node, false otherwise.
 */

bool
radix_tree_empty_tree_p(struct radix_tree *t)
{

	return t->t_root == NULL;
}
226 1.9 yamt
227 1.3 yamt static void
228 1.3 yamt radix_tree_node_init(struct radix_tree_node *n)
229 1.3 yamt {
230 1.3 yamt
231 1.3 yamt memset(n, 0, sizeof(*n));
232 1.3 yamt }
233 1.3 yamt
234 1.1 yamt #if defined(_KERNEL)
235 1.2 yamt pool_cache_t radix_tree_node_cache __read_mostly;
236 1.1 yamt
237 1.1 yamt static int
238 1.1 yamt radix_tree_node_ctor(void *dummy, void *item, int flags)
239 1.1 yamt {
240 1.1 yamt struct radix_tree_node *n = item;
241 1.1 yamt
242 1.1 yamt KASSERT(dummy == NULL);
243 1.3 yamt radix_tree_node_init(n);
244 1.1 yamt return 0;
245 1.1 yamt }
246 1.1 yamt
247 1.1 yamt /*
248 1.1 yamt * radix_tree_init:
249 1.1 yamt *
250 1.1 yamt * initialize the subsystem.
251 1.1 yamt */
252 1.1 yamt
void
radix_tree_init(void)
{

	/*
	 * the ctor leaves nodes in the clean state, so
	 * radix_tree_alloc_node can hand them out directly.
	 */
	radix_tree_node_cache = pool_cache_init(sizeof(struct radix_tree_node),
	    0, 0, 0, "radix_tree_node", NULL, IPL_NONE, radix_tree_node_ctor,
	    NULL, NULL);
	KASSERT(radix_tree_node_cache != NULL);
}
262 1.1 yamt #endif /* defined(_KERNEL) */
263 1.1 yamt
264 1.1 yamt static bool __unused
265 1.1 yamt radix_tree_node_clean_p(const struct radix_tree_node *n)
266 1.1 yamt {
267 1.1 yamt unsigned int i;
268 1.1 yamt
269 1.1 yamt if (n->n_nptrs != 0) {
270 1.1 yamt return false;
271 1.1 yamt }
272 1.1 yamt for (i = 0; i < RADIX_TREE_PTR_PER_NODE; i++) {
273 1.1 yamt if (n->n_ptrs[i] != NULL) {
274 1.1 yamt return false;
275 1.1 yamt }
276 1.1 yamt }
277 1.1 yamt return true;
278 1.1 yamt }
279 1.1 yamt
/*
 * radix_tree_alloc_node:
 *
 * allocate a clean (all-NULL, zero count) intermediate node.
 * return NULL if allocation failed.  does not block.
 */

static struct radix_tree_node *
radix_tree_alloc_node(void)
{
	struct radix_tree_node *n;

#if defined(_KERNEL)
	/* the pool cache ctor has already initialized the node. */
	n = pool_cache_get(radix_tree_node_cache, PR_NOWAIT);
#else /* defined(_KERNEL) */
#if defined(_STANDALONE)
	n = alloc(sizeof(*n));
#else /* defined(_STANDALONE) */
	n = malloc(sizeof(*n));
#endif /* defined(_STANDALONE) */
	/* raw allocators return uninitialized memory; clean it here. */
	if (n != NULL) {
		radix_tree_node_init(n);
	}
#endif /* defined(_KERNEL) */
	KASSERT(n == NULL || radix_tree_node_clean_p(n));
	return n;
}
300 1.1 yamt
/*
 * radix_tree_free_node:
 *
 * free an intermediate node.  the caller must have returned the node
 * to the clean state (see radix_tree_node_clean_p) beforehand.
 */

static void
radix_tree_free_node(struct radix_tree_node *n)
{

	KASSERT(radix_tree_node_clean_p(n));
#if defined(_KERNEL)
	pool_cache_put(radix_tree_node_cache, n);
#elif defined(_STANDALONE)
	dealloc(n, sizeof(*n));
#else
	free(n);
#endif
}
314 1.1 yamt
315 1.1 yamt static int
316 1.1 yamt radix_tree_grow(struct radix_tree *t, unsigned int newheight)
317 1.1 yamt {
318 1.1 yamt const unsigned int tagmask = entry_tagmask(t->t_root);
319 1.1 yamt
320 1.1 yamt KASSERT(newheight <= 64 / RADIX_TREE_BITS_PER_HEIGHT);
321 1.1 yamt if (t->t_root == NULL) {
322 1.1 yamt t->t_height = newheight;
323 1.1 yamt return 0;
324 1.1 yamt }
325 1.1 yamt while (t->t_height < newheight) {
326 1.1 yamt struct radix_tree_node *n;
327 1.1 yamt
328 1.1 yamt n = radix_tree_alloc_node();
329 1.1 yamt if (n == NULL) {
330 1.1 yamt /*
331 1.1 yamt * don't bother to revert our changes.
332 1.1 yamt * the caller will likely retry.
333 1.1 yamt */
334 1.1 yamt return ENOMEM;
335 1.1 yamt }
336 1.1 yamt n->n_nptrs = 1;
337 1.1 yamt n->n_ptrs[0] = t->t_root;
338 1.1 yamt t->t_root = entry_compose(n, tagmask);
339 1.1 yamt t->t_height++;
340 1.1 yamt }
341 1.1 yamt return 0;
342 1.1 yamt }
343 1.1 yamt
344 1.5 yamt /*
345 1.5 yamt * radix_tree_lookup_ptr:
346 1.5 yamt *
347 1.5 yamt * an internal helper function used for various exported functions.
348 1.5 yamt *
349 1.5 yamt * return the pointer to store the node for the given index.
350 1.5 yamt *
351 1.5 yamt * if alloc is true, try to allocate the storage. (note for _KERNEL:
352 1.5 yamt * in that case, this function can block.) if the allocation failed or
353 1.5 yamt * alloc is false, return NULL.
354 1.5 yamt *
355 1.5 yamt * if path is not NULL, fill it for the caller's investigation.
356 1.5 yamt *
357 1.5 yamt * if tagmask is not zero, search only for nodes with the tag set.
358 1.15 yamt * note that, however, this function doesn't check the tagmask for the leaf
359 1.15 yamt * pointer. it's a caller's responsibility to investigate the value which
360 1.15 yamt * is pointed by the returned pointer if necessary.
361 1.5 yamt *
362 1.5 yamt * while this function is a bit large, as it's called with some constant
363 1.5 yamt * arguments, inlining might have benefits. anyway, a compiler will decide.
364 1.5 yamt */
365 1.5 yamt
static inline void **
radix_tree_lookup_ptr(struct radix_tree *t, uint64_t idx,
    struct radix_tree_path *path, bool alloc, const unsigned int tagmask)
{
	struct radix_tree_node *n;
	/* hshift: shift amount of the digit just above the current root. */
	int hshift = RADIX_TREE_BITS_PER_HEIGHT * t->t_height;
	int shift;
	void **vpp;
	const uint64_t mask = (UINT64_C(1) << RADIX_TREE_BITS_PER_HEIGHT) - 1;
	struct radix_tree_node_ref *refs = NULL;

	/*
	 * check unsupported combinations
	 */
	KASSERT(tagmask == 0 || !alloc);
	KASSERT(path == NULL || !alloc);
	vpp = &t->t_root;
	if (path != NULL) {
		refs = path->p_refs;
		refs->pptr = vpp;
	}
	n = NULL;
	/* consume idx one digit at a time, most significant first. */
	for (shift = 64 - RADIX_TREE_BITS_PER_HEIGHT; shift >= 0;) {
		struct radix_tree_node *c;
		void *entry;
		const uint64_t i = (idx >> shift) & mask;

		if (shift >= hshift) {
			/* this digit lies above the current tree height. */
			unsigned int newheight;

			KASSERT(vpp == &t->t_root);
			if (i == 0) {
				/* zero digit; no growth necessary. */
				shift -= RADIX_TREE_BITS_PER_HEIGHT;
				continue;
			}
			if (!alloc) {
				/* the tree is too short to cover idx. */
				if (path != NULL) {
					KASSERT((refs - path->p_refs) == 0);
					path->p_lastidx =
					    RADIX_TREE_INVALID_HEIGHT;
				}
				return NULL;
			}
			/* grow the tree so that it covers this digit. */
			newheight = shift / RADIX_TREE_BITS_PER_HEIGHT + 1;
			if (radix_tree_grow(t, newheight)) {
				return NULL;
			}
			hshift = RADIX_TREE_BITS_PER_HEIGHT * t->t_height;
		}
		entry = *vpp;
		c = entry_ptr(entry);
		if (c == NULL ||
		    (tagmask != 0 &&
		    (entry_tagmask(entry) & tagmask) == 0)) {
			/* no child here, or the subtree lacks the tag. */
			if (!alloc) {
				if (path != NULL) {
					path->p_lastidx = refs - path->p_refs;
				}
				return NULL;
			}
			c = radix_tree_alloc_node();
			if (c == NULL) {
				return NULL;
			}
			*vpp = c;
			if (n != NULL) {
				/* the parent gained a child. */
				KASSERT(n->n_nptrs < RADIX_TREE_PTR_PER_NODE);
				n->n_nptrs++;
			}
		}
		/* descend into the child chosen by this digit. */
		n = c;
		vpp = &n->n_ptrs[i];
		if (path != NULL) {
			refs++;
			refs->pptr = vpp;
		}
		shift -= RADIX_TREE_BITS_PER_HEIGHT;
	}
	if (alloc) {
		/* account for the leaf the caller is about to store. */
		KASSERT(*vpp == NULL);
		if (n != NULL) {
			KASSERT(n->n_nptrs < RADIX_TREE_PTR_PER_NODE);
			n->n_nptrs++;
		}
	}
	if (path != NULL) {
		path->p_lastidx = refs - path->p_refs;
	}
	return vpp;
}
456 1.1 yamt
457 1.1 yamt /*
458 1.1 yamt * radix_tree_insert_node:
459 1.1 yamt *
460 1.1 yamt * insert the node at idx.
461 1.1 yamt * it's illegal to insert NULL.
462 1.1 yamt * it's illegal to insert a non-aligned pointer.
463 1.1 yamt *
464 1.1 yamt * this function returns ENOMEM if necessary memory allocation failed.
465 1.1 yamt * otherwise, this function returns 0.
466 1.1 yamt *
467 1.1 yamt * note that inserting a node can involves memory allocation for intermediate
468 1.1 yamt * nodes. if _KERNEL, it's done with non-blocking IPL_NONE memory allocation.
469 1.4 yamt *
470 1.4 yamt * for the newly inserted node, all tags are cleared.
471 1.1 yamt */
472 1.1 yamt
473 1.1 yamt int
474 1.1 yamt radix_tree_insert_node(struct radix_tree *t, uint64_t idx, void *p)
475 1.1 yamt {
476 1.1 yamt void **vpp;
477 1.1 yamt
478 1.1 yamt KASSERT(p != NULL);
479 1.1 yamt KASSERT(entry_compose(p, 0) == p);
480 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, NULL, true, 0);
481 1.1 yamt if (vpp == NULL) {
482 1.1 yamt return ENOMEM;
483 1.1 yamt }
484 1.1 yamt KASSERT(*vpp == NULL);
485 1.1 yamt *vpp = p;
486 1.1 yamt return 0;
487 1.1 yamt }
488 1.1 yamt
489 1.4 yamt /*
490 1.4 yamt * radix_tree_replace_node:
491 1.4 yamt *
492 1.4 yamt * replace a node at the given index with the given node.
493 1.4 yamt * return the old node.
494 1.4 yamt * it's illegal to try to replace a node which has not been inserted.
495 1.4 yamt *
496 1.4 yamt * this function doesn't change tags.
497 1.4 yamt */
498 1.4 yamt
499 1.1 yamt void *
500 1.1 yamt radix_tree_replace_node(struct radix_tree *t, uint64_t idx, void *p)
501 1.1 yamt {
502 1.1 yamt void **vpp;
503 1.1 yamt void *oldp;
504 1.1 yamt
505 1.1 yamt KASSERT(p != NULL);
506 1.1 yamt KASSERT(entry_compose(p, 0) == p);
507 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0);
508 1.1 yamt KASSERT(vpp != NULL);
509 1.1 yamt oldp = *vpp;
510 1.1 yamt KASSERT(oldp != NULL);
511 1.1 yamt *vpp = entry_compose(p, entry_tagmask(*vpp));
512 1.1 yamt return entry_ptr(oldp);
513 1.1 yamt }
514 1.1 yamt
515 1.1 yamt /*
516 1.1 yamt * radix_tree_remove_node:
517 1.1 yamt *
518 1.1 yamt * remove the node at idx.
519 1.1 yamt * it's illegal to try to remove a node which has not been inserted.
520 1.1 yamt */
521 1.1 yamt
void *
radix_tree_remove_node(struct radix_tree *t, uint64_t idx)
{
	struct radix_tree_path path;
	void **vpp;
	void *oldp;
	int i;

	vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0);
	KASSERT(vpp != NULL);
	oldp = *vpp;
	KASSERT(oldp != NULL);
	KASSERT(path.p_lastidx == t->t_height);
	KASSERT(vpp == path_pptr(t, &path, path.p_lastidx));
	/* clear the leaf slot. */
	*vpp = NULL;
	/*
	 * walk the recorded path from the bottom up,
	 * freeing nodes which became empty.
	 */
	for (i = t->t_height - 1; i >= 0; i--) {
		void *entry;
		struct radix_tree_node ** const pptr =
		    (struct radix_tree_node **)path_pptr(t, &path, i);
		struct radix_tree_node *n;

		KASSERT(pptr != NULL);
		entry = *pptr;
		n = entry_ptr(entry);
		KASSERT(n != NULL);
		KASSERT(n->n_nptrs > 0);
		n->n_nptrs--;
		if (n->n_nptrs > 0) {
			/* this node is still in use; stop freeing. */
			break;
		}
		radix_tree_free_node(n);
		*pptr = NULL;
	}
	/*
	 * fix up height
	 */
	if (i < 0) {
		/* every node on the path was freed; the tree is empty. */
		KASSERT(t->t_root == NULL);
		t->t_height = 0;
	}
	/*
	 * update tags
	 */
	for (; i >= 0; i--) {
		void *entry;
		struct radix_tree_node ** const pptr =
		    (struct radix_tree_node **)path_pptr(t, &path, i);
		struct radix_tree_node *n;
		unsigned int newmask;

		KASSERT(pptr != NULL);
		entry = *pptr;
		n = entry_ptr(entry);
		KASSERT(n != NULL);
		KASSERT(n->n_nptrs > 0);
		/* recompute this level's tags from its surviving children. */
		newmask = any_children_tagmask(n);
		if (newmask == entry_tagmask(entry)) {
			/* the levels above are already consistent. */
			break;
		}
		*pptr = entry_compose(n, newmask);
	}
	/*
	 * XXX is it worth to try to reduce height?
	 * if we do that, make radix_tree_grow rollback its change as well.
	 */
	return entry_ptr(oldp);
}
589 1.1 yamt
590 1.1 yamt /*
591 1.1 yamt * radix_tree_lookup_node:
592 1.1 yamt *
593 1.1 yamt * returns the node at idx.
594 1.1 yamt * returns NULL if nothing is found at idx.
595 1.1 yamt */
596 1.1 yamt
597 1.1 yamt void *
598 1.1 yamt radix_tree_lookup_node(struct radix_tree *t, uint64_t idx)
599 1.1 yamt {
600 1.1 yamt void **vpp;
601 1.1 yamt
602 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0);
603 1.1 yamt if (vpp == NULL) {
604 1.1 yamt return NULL;
605 1.1 yamt }
606 1.1 yamt return entry_ptr(*vpp);
607 1.1 yamt }
608 1.1 yamt
609 1.1 yamt static inline void
610 1.1 yamt gang_lookup_init(struct radix_tree *t, uint64_t idx,
611 1.1 yamt struct radix_tree_path *path, const unsigned int tagmask)
612 1.1 yamt {
613 1.1 yamt void **vpp;
614 1.1 yamt
615 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, path, false, tagmask);
616 1.1 yamt KASSERT(vpp == NULL ||
617 1.1 yamt vpp == path_pptr(t, path, path->p_lastidx));
618 1.1 yamt KASSERT(&t->t_root == path_pptr(t, path, 0));
619 1.15 yamt KASSERT(path->p_lastidx == RADIX_TREE_INVALID_HEIGHT ||
620 1.15 yamt path->p_lastidx == t->t_height ||
621 1.15 yamt !entry_match_p(*path_pptr(t, path, path->p_lastidx), tagmask));
622 1.1 yamt }
623 1.1 yamt
624 1.15 yamt /*
625 1.15 yamt * gang_lookup_scan:
626 1.15 yamt *
627 1.15 yamt * a helper routine for radix_tree_gang_lookup_node and its variants.
628 1.15 yamt */
629 1.15 yamt
static inline unsigned int
__attribute__((__always_inline__))
gang_lookup_scan(struct radix_tree *t, struct radix_tree_path *path,
    void **results, unsigned int maxresults, const unsigned int tagmask,
    bool reverse)
{

	/*
	 * we keep the path updated only for lastidx-1.
	 * vpp is what path_pptr(t, path, lastidx) would be.
	 */
	void **vpp;
	unsigned int nfound;
	unsigned int lastidx;
	/*
	 * set up scan direction dependent constants so that we can iterate
	 * n_ptrs as the following.
	 *
	 *	for (i = first; i != guard; i += step)
	 *		visit n->n_ptrs[i];
	 */
	const int step = reverse ? -1 : 1;
	const unsigned int first = reverse ? RADIX_TREE_PTR_PER_NODE - 1 : 0;
	const unsigned int last = reverse ? 0 : RADIX_TREE_PTR_PER_NODE - 1;
	const unsigned int guard = last + step;

	KASSERT(maxresults > 0);
	KASSERT(&t->t_root == path_pptr(t, path, 0));
	lastidx = path->p_lastidx;
	KASSERT(lastidx == RADIX_TREE_INVALID_HEIGHT ||
	    lastidx == t->t_height ||
	    !entry_match_p(*path_pptr(t, path, lastidx), tagmask));
	nfound = 0;
	if (lastidx == RADIX_TREE_INVALID_HEIGHT) {
		/*
		 * the tree is too short to cover the start index.
		 */
		if (reverse) {
			/*
			 * for a reverse scan, restart from the root and
			 * descend towards smaller indexes.
			 */
			lastidx = 0;
			vpp = path_pptr(t, path, lastidx);
			goto descend;
		}
		/* for a forward scan, nothing exists at or after idx. */
		return 0;
	}
	vpp = path_pptr(t, path, lastidx);
	while (/*CONSTCOND*/true) {
		struct radix_tree_node *n;
		unsigned int i;

		if (entry_match_p(*vpp, tagmask)) {
			KASSERT(lastidx == t->t_height);
			/*
			 * record the matching non-NULL leaf.
			 */
			results[nfound] = entry_ptr(*vpp);
			nfound++;
			if (nfound == maxresults) {
				return nfound;
			}
		}
scan_siblings:
		/*
		 * try to find the next matching non-NULL sibling.
		 */
		if (lastidx == 0) {
			/*
			 * the root has no siblings.
			 * we're done.
			 */
			KASSERT(vpp == &t->t_root);
			break;
		}
		n = path_node(t, path, lastidx - 1);
		if (*vpp != NULL && n->n_nptrs == 1) {
			/*
			 * optimization; if the node has only a single pointer
			 * and we've already visited it, there's no point to
			 * keep scanning in this node.
			 */
			goto no_siblings;
		}
		for (i = vpp - n->n_ptrs + step; i != guard; i += step) {
			KASSERT(i < RADIX_TREE_PTR_PER_NODE);
			if (entry_match_p(n->n_ptrs[i], tagmask)) {
				vpp = &n->n_ptrs[i];
				break;
			}
		}
		if (i == guard) {
no_siblings:
			/*
			 * not found. go to parent.
			 */
			lastidx--;
			vpp = path_pptr(t, path, lastidx);
			goto scan_siblings;
		}
descend:
		/*
		 * following the left-most (or right-most in the case of
		 * reverse scan) child node, descend until reaching the leaf
		 * or a non-matching entry.
		 */
		while (entry_match_p(*vpp, tagmask) && lastidx < t->t_height) {
			/*
			 * save vpp in the path so that we can come back to
			 * this node after finishing visiting children.
			 */
			path->p_refs[lastidx].pptr = vpp;
			n = entry_ptr(*vpp);
			vpp = &n->n_ptrs[first];
			lastidx++;
		}
	}
	return nfound;
}
743 1.1 yamt
744 1.1 yamt /*
745 1.1 yamt * radix_tree_gang_lookup_node:
746 1.1 yamt *
747 1.1 yamt * search nodes starting from idx in the ascending order.
748 1.1 yamt * results should be an array large enough to hold maxresults pointers.
749 1.1 yamt * returns the number of nodes found, up to maxresults.
750 1.1 yamt * returning less than maxresults means there are no more nodes.
751 1.1 yamt *
752 1.1 yamt * the result of this function is semantically equivalent to what could be
753 1.1 yamt * obtained by repeated calls of radix_tree_lookup_node with increasing index.
754 1.1 yamt * but this function is much faster when node indexes are distributed sparsely.
755 1.1 yamt *
756 1.1 yamt * note that this function doesn't return exact values of node indexes of
757 1.1 yamt * found nodes. if they are important for a caller, it's the caller's
758 1.1 yamt * responsibility to check them, typically by examinining the returned nodes
759 1.1 yamt * using some caller-specific knowledge about them.
760 1.1 yamt */
761 1.1 yamt
762 1.1 yamt unsigned int
763 1.1 yamt radix_tree_gang_lookup_node(struct radix_tree *t, uint64_t idx,
764 1.1 yamt void **results, unsigned int maxresults)
765 1.1 yamt {
766 1.1 yamt struct radix_tree_path path;
767 1.1 yamt
768 1.1 yamt gang_lookup_init(t, idx, &path, 0);
769 1.15 yamt return gang_lookup_scan(t, &path, results, maxresults, 0, false);
770 1.15 yamt }
771 1.15 yamt
772 1.15 yamt /*
773 1.15 yamt * radix_tree_gang_lookup_node_reverse:
774 1.15 yamt *
775 1.15 yamt * same as radix_tree_gang_lookup_node except that this one scans the
776 1.15 yamt * tree in the reverse order. ie. descending index values.
777 1.15 yamt */
778 1.15 yamt
779 1.15 yamt unsigned int
780 1.15 yamt radix_tree_gang_lookup_node_reverse(struct radix_tree *t, uint64_t idx,
781 1.15 yamt void **results, unsigned int maxresults)
782 1.15 yamt {
783 1.15 yamt struct radix_tree_path path;
784 1.15 yamt
785 1.15 yamt gang_lookup_init(t, idx, &path, 0);
786 1.15 yamt return gang_lookup_scan(t, &path, results, maxresults, 0, true);
787 1.1 yamt }
788 1.1 yamt
789 1.1 yamt /*
790 1.1 yamt * radix_tree_gang_lookup_tagged_node:
791 1.1 yamt *
792 1.1 yamt * same as radix_tree_gang_lookup_node except that this one only returns
793 1.1 yamt * nodes tagged with tagid.
794 1.1 yamt */
795 1.1 yamt
796 1.1 yamt unsigned int
797 1.1 yamt radix_tree_gang_lookup_tagged_node(struct radix_tree *t, uint64_t idx,
798 1.1 yamt void **results, unsigned int maxresults, radix_tree_tagid_t tagid)
799 1.1 yamt {
800 1.1 yamt struct radix_tree_path path;
801 1.1 yamt const unsigned int tagmask = tagid_to_mask(tagid);
802 1.1 yamt
803 1.1 yamt gang_lookup_init(t, idx, &path, tagmask);
804 1.15 yamt return gang_lookup_scan(t, &path, results, maxresults, tagmask, false);
805 1.15 yamt }
806 1.15 yamt
807 1.15 yamt /*
808 1.15 yamt * radix_tree_gang_lookup_tagged_node_reverse:
809 1.15 yamt *
810 1.15 yamt * same as radix_tree_gang_lookup_tagged_node except that this one scans the
811 1.15 yamt * tree in the reverse order. ie. descending index values.
812 1.15 yamt */
813 1.15 yamt
814 1.15 yamt unsigned int
815 1.15 yamt radix_tree_gang_lookup_tagged_node_reverse(struct radix_tree *t, uint64_t idx,
816 1.15 yamt void **results, unsigned int maxresults, radix_tree_tagid_t tagid)
817 1.15 yamt {
818 1.15 yamt struct radix_tree_path path;
819 1.15 yamt const unsigned int tagmask = tagid_to_mask(tagid);
820 1.15 yamt
821 1.15 yamt gang_lookup_init(t, idx, &path, tagmask);
822 1.15 yamt return gang_lookup_scan(t, &path, results, maxresults, tagmask, true);
823 1.1 yamt }
824 1.1 yamt
825 1.4 yamt /*
826 1.4 yamt * radix_tree_get_tag:
827 1.4 yamt *
828 1.4 yamt * return if the tag is set for the node at the given index. (true if set)
829 1.4 yamt * it's illegal to call this function for a node which has not been inserted.
830 1.4 yamt */
831 1.4 yamt
832 1.1 yamt bool
833 1.1 yamt radix_tree_get_tag(struct radix_tree *t, uint64_t idx,
834 1.1 yamt radix_tree_tagid_t tagid)
835 1.1 yamt {
836 1.1 yamt #if 1
837 1.1 yamt const unsigned int tagmask = tagid_to_mask(tagid);
838 1.1 yamt void **vpp;
839 1.1 yamt
840 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, NULL, false, tagmask);
841 1.1 yamt if (vpp == NULL) {
842 1.1 yamt return false;
843 1.1 yamt }
844 1.1 yamt KASSERT(*vpp != NULL);
845 1.1 yamt return (entry_tagmask(*vpp) & tagmask) != 0;
846 1.1 yamt #else
847 1.1 yamt const unsigned int tagmask = tagid_to_mask(tagid);
848 1.1 yamt void **vpp;
849 1.1 yamt
850 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, NULL, false, 0);
851 1.1 yamt KASSERT(vpp != NULL);
852 1.1 yamt return (entry_tagmask(*vpp) & tagmask) != 0;
853 1.1 yamt #endif
854 1.1 yamt }
855 1.1 yamt
856 1.4 yamt /*
857 1.4 yamt * radix_tree_set_tag:
858 1.4 yamt *
859 1.4 yamt * set the tag for the node at the given index.
860 1.4 yamt * it's illegal to call this function for a node which has not been inserted.
861 1.4 yamt */
862 1.4 yamt
863 1.1 yamt void
864 1.1 yamt radix_tree_set_tag(struct radix_tree *t, uint64_t idx,
865 1.1 yamt radix_tree_tagid_t tagid)
866 1.1 yamt {
867 1.1 yamt struct radix_tree_path path;
868 1.1 yamt const unsigned int tagmask = tagid_to_mask(tagid);
869 1.1 yamt void **vpp;
870 1.1 yamt int i;
871 1.1 yamt
872 1.1 yamt vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0);
873 1.1 yamt KASSERT(vpp != NULL);
874 1.1 yamt KASSERT(*vpp != NULL);
875 1.1 yamt KASSERT(path.p_lastidx == t->t_height);
876 1.1 yamt KASSERT(vpp == path_pptr(t, &path, path.p_lastidx));
877 1.1 yamt for (i = t->t_height; i >= 0; i--) {
878 1.1 yamt void ** const pptr = (void **)path_pptr(t, &path, i);
879 1.1 yamt void *entry;
880 1.1 yamt
881 1.1 yamt KASSERT(pptr != NULL);
882 1.1 yamt entry = *pptr;
883 1.1 yamt if ((entry_tagmask(entry) & tagmask) != 0) {
884 1.1 yamt break;
885 1.1 yamt }
886 1.1 yamt *pptr = (void *)((uintptr_t)entry | tagmask);
887 1.1 yamt }
888 1.1 yamt }
889 1.1 yamt
890 1.4 yamt /*
891 1.4 yamt * radix_tree_clear_tag:
892 1.4 yamt *
893 1.4 yamt * clear the tag for the node at the given index.
894 1.4 yamt * it's illegal to call this function for a node which has not been inserted.
895 1.4 yamt */
896 1.4 yamt
void
radix_tree_clear_tag(struct radix_tree *t, uint64_t idx,
    radix_tree_tagid_t tagid)
{
	struct radix_tree_path path;
	const unsigned int tagmask = tagid_to_mask(tagid);
	void **vpp;
	int i;

	/*
	 * record the whole root-to-leaf path; the tag bits may need
	 * to be adjusted at every level.
	 */
	vpp = radix_tree_lookup_ptr(t, idx, &path, false, 0);
	KASSERT(vpp != NULL);
	KASSERT(*vpp != NULL);
	KASSERT(path.p_lastidx == t->t_height);
	KASSERT(vpp == path_pptr(t, &path, path.p_lastidx));
	/*
	 * if already cleared, nothing to do
	 */
	if ((entry_tagmask(*vpp) & tagmask) == 0) {
		return;
	}
	/*
	 * clear the tag only if no children have the tag.
	 */
	for (i = t->t_height; i >= 0; i--) {	/* leaf level down to root */
		void ** const pptr = (void **)path_pptr(t, &path, i);
		void *entry;

		KASSERT(pptr != NULL);
		entry = *pptr;
		/* the leaf was tagged, so each entry on the path must be. */
		KASSERT((entry_tagmask(entry) & tagmask) != 0);
		*pptr = entry_compose(entry_ptr(entry),
		    entry_tagmask(entry) & ~tagmask);
		/*
		 * check if we should proceed to process the next level.
		 */
		if (0 < i) {
			struct radix_tree_node *n = path_node(t, &path, i - 1);

			/*
			 * if some other entry in this node still carries
			 * the tag, the ancestors must stay tagged; stop.
			 */
			if ((any_children_tagmask(n) & tagmask) != 0) {
				break;
			}
		}
	}
}
941 1.1 yamt
942 1.1 yamt #if defined(UNITTEST)
943 1.1 yamt
944 1.1 yamt #include <inttypes.h>
945 1.1 yamt #include <stdio.h>
946 1.1 yamt
947 1.1 yamt static void
948 1.1 yamt radix_tree_dump_node(const struct radix_tree *t, void *vp,
949 1.1 yamt uint64_t offset, unsigned int height)
950 1.1 yamt {
951 1.1 yamt struct radix_tree_node *n;
952 1.1 yamt unsigned int i;
953 1.1 yamt
954 1.1 yamt for (i = 0; i < t->t_height - height; i++) {
955 1.1 yamt printf(" ");
956 1.1 yamt }
957 1.1 yamt if (entry_tagmask(vp) == 0) {
958 1.1 yamt printf("[%" PRIu64 "] %p", offset, entry_ptr(vp));
959 1.1 yamt } else {
960 1.1 yamt printf("[%" PRIu64 "] %p (tagmask=0x%x)", offset, entry_ptr(vp),
961 1.1 yamt entry_tagmask(vp));
962 1.1 yamt }
963 1.1 yamt if (height == 0) {
964 1.1 yamt printf(" (leaf)\n");
965 1.1 yamt return;
966 1.1 yamt }
967 1.1 yamt n = entry_ptr(vp);
968 1.1 yamt assert(any_children_tagmask(n) == entry_tagmask(vp));
969 1.1 yamt printf(" (%u children)\n", n->n_nptrs);
970 1.1 yamt for (i = 0; i < __arraycount(n->n_ptrs); i++) {
971 1.1 yamt void *c;
972 1.1 yamt
973 1.1 yamt c = n->n_ptrs[i];
974 1.1 yamt if (c == NULL) {
975 1.1 yamt continue;
976 1.1 yamt }
977 1.1 yamt radix_tree_dump_node(t, c,
978 1.1 yamt offset + i * (UINT64_C(1) <<
979 1.1 yamt (RADIX_TREE_BITS_PER_HEIGHT * (height - 1))), height - 1);
980 1.1 yamt }
981 1.1 yamt }
982 1.1 yamt
983 1.1 yamt void radix_tree_dump(const struct radix_tree *);
984 1.1 yamt
/* print the whole tree to stdout, for debugging. */
void
radix_tree_dump(const struct radix_tree *t)
{

	printf("tree %p height=%u\n", t, t->t_height);
	radix_tree_dump_node(t, t->t_root, 0, t->t_height);
}
992 1.1 yamt
993 1.1 yamt static void
994 1.1 yamt test1(void)
995 1.1 yamt {
996 1.1 yamt struct radix_tree s;
997 1.1 yamt struct radix_tree *t = &s;
998 1.1 yamt void *results[3];
999 1.1 yamt
1000 1.1 yamt radix_tree_init_tree(t);
1001 1.1 yamt radix_tree_dump(t);
1002 1.1 yamt assert(radix_tree_lookup_node(t, 0) == NULL);
1003 1.1 yamt assert(radix_tree_lookup_node(t, 1000) == NULL);
1004 1.15 yamt assert(radix_tree_gang_lookup_node(t, 0, results, 3) == 0);
1005 1.15 yamt assert(radix_tree_gang_lookup_node(t, 1000, results, 3) == 0);
1006 1.15 yamt assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3) == 0);
1007 1.15 yamt assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3) == 0);
1008 1.15 yamt assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, 0) == 0);
1009 1.15 yamt assert(radix_tree_gang_lookup_tagged_node(t, 1000, results, 3, 0) == 0);
1010 1.15 yamt assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3, 0)
1011 1.15 yamt == 0);
1012 1.15 yamt assert(radix_tree_gang_lookup_tagged_node_reverse(t, 1000, results, 3,
1013 1.15 yamt 0) == 0);
1014 1.15 yamt assert(radix_tree_empty_tree_p(t));
1015 1.15 yamt assert(radix_tree_insert_node(t, 0, (void *)0xdeadbea0) == 0);
1016 1.15 yamt assert(!radix_tree_empty_tree_p(t));
1017 1.15 yamt assert(radix_tree_lookup_node(t, 0) == (void *)0xdeadbea0);
1018 1.15 yamt assert(radix_tree_lookup_node(t, 1000) == NULL);
1019 1.15 yamt memset(results, 0, sizeof(results));
1020 1.15 yamt assert(radix_tree_gang_lookup_node(t, 0, results, 3) == 1);
1021 1.15 yamt assert(results[0] == (void *)0xdeadbea0);
1022 1.15 yamt assert(radix_tree_gang_lookup_node(t, 1000, results, 3) == 0);
1023 1.15 yamt memset(results, 0, sizeof(results));
1024 1.15 yamt assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3) == 1);
1025 1.15 yamt assert(results[0] == (void *)0xdeadbea0);
1026 1.15 yamt memset(results, 0, sizeof(results));
1027 1.15 yamt assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3) == 1);
1028 1.15 yamt assert(results[0] == (void *)0xdeadbea0);
1029 1.15 yamt assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, 0)
1030 1.15 yamt == 0);
1031 1.15 yamt assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3, 0)
1032 1.15 yamt == 0);
1033 1.1 yamt assert(radix_tree_insert_node(t, 1000, (void *)0xdeadbea0) == 0);
1034 1.15 yamt assert(radix_tree_remove_node(t, 0) == (void *)0xdeadbea0);
1035 1.15 yamt assert(!radix_tree_empty_tree_p(t));
1036 1.1 yamt radix_tree_dump(t);
1037 1.15 yamt assert(radix_tree_lookup_node(t, 0) == NULL);
1038 1.15 yamt assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
1039 1.15 yamt memset(results, 0, sizeof(results));
1040 1.15 yamt assert(radix_tree_gang_lookup_node(t, 0, results, 3) == 1);
1041 1.15 yamt assert(results[0] == (void *)0xdeadbea0);
1042 1.15 yamt memset(results, 0, sizeof(results));
1043 1.15 yamt assert(radix_tree_gang_lookup_node(t, 1000, results, 3) == 1);
1044 1.15 yamt assert(results[0] == (void *)0xdeadbea0);
1045 1.15 yamt assert(radix_tree_gang_lookup_node_reverse(t, 0, results, 3) == 0);
1046 1.15 yamt memset(results, 0, sizeof(results));
1047 1.15 yamt assert(radix_tree_gang_lookup_node_reverse(t, 1000, results, 3) == 1);
1048 1.15 yamt assert(results[0] == (void *)0xdeadbea0);
1049 1.15 yamt assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 3, 0)
1050 1.15 yamt == 0);
1051 1.15 yamt assert(radix_tree_gang_lookup_tagged_node_reverse(t, 0, results, 3, 0)
1052 1.15 yamt == 0);
1053 1.1 yamt assert(!radix_tree_get_tag(t, 1000, 0));
1054 1.1 yamt assert(!radix_tree_get_tag(t, 1000, 1));
1055 1.1 yamt radix_tree_set_tag(t, 1000, 1);
1056 1.1 yamt assert(!radix_tree_get_tag(t, 1000, 0));
1057 1.1 yamt assert(radix_tree_get_tag(t, 1000, 1));
1058 1.1 yamt radix_tree_dump(t);
1059 1.1 yamt assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
1060 1.1 yamt assert(radix_tree_insert_node(t, 0, (void *)0xbea0) == 0);
1061 1.1 yamt radix_tree_dump(t);
1062 1.1 yamt assert(radix_tree_lookup_node(t, 0) == (void *)0xbea0);
1063 1.1 yamt assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
1064 1.1 yamt assert(radix_tree_insert_node(t, UINT64_C(10000000000), (void *)0xdea0)
1065 1.1 yamt == 0);
1066 1.1 yamt radix_tree_dump(t);
1067 1.1 yamt assert(radix_tree_lookup_node(t, 0) == (void *)0xbea0);
1068 1.1 yamt assert(radix_tree_lookup_node(t, 1000) == (void *)0xdeadbea0);
1069 1.1 yamt assert(radix_tree_lookup_node(t, UINT64_C(10000000000)) ==
1070 1.1 yamt (void *)0xdea0);
1071 1.1 yamt radix_tree_dump(t);
1072 1.1 yamt assert(!radix_tree_get_tag(t, 0, 1));
1073 1.1 yamt assert(radix_tree_get_tag(t, 1000, 1));
1074 1.1 yamt assert(!radix_tree_get_tag(t, UINT64_C(10000000000), 1));
1075 1.1 yamt radix_tree_set_tag(t, 0, 1);;
1076 1.1 yamt radix_tree_set_tag(t, UINT64_C(10000000000), 1);
1077 1.1 yamt radix_tree_dump(t);
1078 1.1 yamt assert(radix_tree_get_tag(t, 0, 1));
1079 1.1 yamt assert(radix_tree_get_tag(t, 1000, 1));
1080 1.1 yamt assert(radix_tree_get_tag(t, UINT64_C(10000000000), 1));
1081 1.1 yamt radix_tree_clear_tag(t, 0, 1);;
1082 1.1 yamt radix_tree_clear_tag(t, UINT64_C(10000000000), 1);
1083 1.1 yamt radix_tree_dump(t);
1084 1.1 yamt assert(!radix_tree_get_tag(t, 0, 1));
1085 1.1 yamt assert(radix_tree_get_tag(t, 1000, 1));
1086 1.1 yamt assert(!radix_tree_get_tag(t, UINT64_C(10000000000), 1));
1087 1.1 yamt radix_tree_dump(t);
1088 1.1 yamt assert(radix_tree_replace_node(t, 1000, (void *)0x12345678) ==
1089 1.1 yamt (void *)0xdeadbea0);
1090 1.1 yamt assert(!radix_tree_get_tag(t, 1000, 0));
1091 1.1 yamt assert(radix_tree_get_tag(t, 1000, 1));
1092 1.1 yamt assert(radix_tree_gang_lookup_node(t, 0, results, 3) == 3);
1093 1.1 yamt assert(results[0] == (void *)0xbea0);
1094 1.1 yamt assert(results[1] == (void *)0x12345678);
1095 1.1 yamt assert(results[2] == (void *)0xdea0);
1096 1.1 yamt assert(radix_tree_gang_lookup_node(t, 1, results, 3) == 2);
1097 1.1 yamt assert(results[0] == (void *)0x12345678);
1098 1.1 yamt assert(results[1] == (void *)0xdea0);
1099 1.1 yamt assert(radix_tree_gang_lookup_node(t, 1001, results, 3) == 1);
1100 1.1 yamt assert(results[0] == (void *)0xdea0);
1101 1.1 yamt assert(radix_tree_gang_lookup_node(t, UINT64_C(10000000001), results, 3)
1102 1.1 yamt == 0);
1103 1.1 yamt assert(radix_tree_gang_lookup_node(t, UINT64_C(1000000000000), results,
1104 1.1 yamt 3) == 0);
1105 1.1 yamt assert(radix_tree_gang_lookup_tagged_node(t, 0, results, 100, 1) == 1);
1106 1.1 yamt assert(results[0] == (void *)0x12345678);
1107 1.1 yamt assert(entry_tagmask(t->t_root) != 0);
1108 1.1 yamt assert(radix_tree_remove_node(t, 1000) == (void *)0x12345678);
1109 1.1 yamt assert(entry_tagmask(t->t_root) == 0);
1110 1.1 yamt radix_tree_dump(t);
1111 1.1 yamt assert(radix_tree_remove_node(t, UINT64_C(10000000000)) ==
1112 1.1 yamt (void *)0xdea0);
1113 1.1 yamt radix_tree_dump(t);
1114 1.1 yamt assert(radix_tree_remove_node(t, 0) == (void *)0xbea0);
1115 1.1 yamt radix_tree_dump(t);
1116 1.1 yamt radix_tree_fini_tree(t);
1117 1.1 yamt }
1118 1.1 yamt
1119 1.1 yamt #include <sys/time.h>
1120 1.1 yamt
/* per-node bookkeeping for test2: the key and the expected tag state. */
struct testnode {
	uint64_t idx;		/* index this node was inserted at */
	bool tagged[RADIX_TREE_TAG_ID_MAX];	/* expected state per tag id */
};
1125 1.1 yamt
/*
 * printops:
 *
 * print one benchmark result line: n operations of the named kind
 * performed between *stv and *etv, as operations per second.
 */
static void
printops(const char *title, const char *name, int tag, unsigned int n,
    const struct timeval *stv, const struct timeval *etv)
{
	/* widen tv_sec before scaling so a 32-bit time_t can't overflow. */
	uint64_t s = (uint64_t)stv->tv_sec * 1000000 + stv->tv_usec;
	uint64_t e = (uint64_t)etv->tv_sec * 1000000 + etv->tv_usec;

	printf("RESULT %s %s %d %lf op/s\n", title, name, tag,
	    (double)n / (e - s) * 1000000);
}
1136 1.1 yamt
1137 1.1 yamt #define TEST2_GANG_LOOKUP_NODES 16
1138 1.1 yamt
1139 1.1 yamt static bool
1140 1.1 yamt test2_should_tag(unsigned int i, radix_tree_tagid_t tagid)
1141 1.1 yamt {
1142 1.1 yamt
1143 1.1 yamt if (tagid == 0) {
1144 1.11 yamt return (i & 0x3) == 0; /* 25% */
1145 1.1 yamt } else {
1146 1.11 yamt return (i % 7) == 0; /* 14% */
1147 1.1 yamt }
1148 1.1 yamt }
1149 1.1 yamt
/*
 * test2:
 *
 * populate the tree with nnodes pseudo-randomly indexed nodes, then
 * benchmark lookup, tag, gang-lookup and removal operations, printing
 * one RESULT line per phase via printops.  "dense" confines the
 * indices to [0, 2 * nnodes) to exercise dense-index behaviour.
 */
static void
test2(const char *title, bool dense)
{
	struct radix_tree s;
	struct radix_tree *t = &s;
	struct testnode *n;
	unsigned int i;
	unsigned int nnodes = 100000;
	unsigned int removed;
	radix_tree_tagid_t tag;
	unsigned int ntagged[RADIX_TREE_TAG_ID_MAX];
	struct testnode *nodes;
	struct timeval stv;
	struct timeval etv;

	nodes = malloc(nnodes * sizeof(*nodes));
	for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
		ntagged[tag] = 0;
	}
	radix_tree_init_tree(t);
	/* populate the tree with unique random indices and apply tags. */
	for (i = 0; i < nnodes; i++) {
		n = &nodes[i];
		n->idx = random();
		if (sizeof(long) == 4) {
			/* random() yields 31 bits; widen the index. */
			n->idx <<= 32;
			n->idx |= (uint32_t)random();
		}
		if (dense) {
			n->idx %= nnodes * 2;
		}
		/* linear-probe past collisions so every index is unique. */
		while (radix_tree_lookup_node(t, n->idx) != NULL) {
			n->idx++;
		}
		radix_tree_insert_node(t, n->idx, n);
		for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
			n->tagged[tag] = test2_should_tag(i, tag);
			if (n->tagged[tag]) {
				radix_tree_set_tag(t, n->idx, tag);
				ntagged[tag]++;
			}
			assert(n->tagged[tag] ==
			    radix_tree_get_tag(t, n->idx, tag));
		}
	}

	/* phase: plain lookups. */
	gettimeofday(&stv, NULL);
	for (i = 0; i < nnodes; i++) {
		n = &nodes[i];
		assert(radix_tree_lookup_node(t, n->idx) == n);
	}
	gettimeofday(&etv, NULL);
	printops(title, "lookup", 0, nnodes, &stv, &etv);

	/* phase: get_tag, cross-checked against the recorded state. */
	for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
		unsigned int count = 0;

		gettimeofday(&stv, NULL);
		for (i = 0; i < nnodes; i++) {
			bool tagged;

			n = &nodes[i];
			tagged = radix_tree_get_tag(t, n->idx, tag);
			assert(n->tagged[tag] == tagged);
			if (tagged) {
				count++;
			}
		}
		gettimeofday(&etv, NULL);
		assert(ntagged[tag] == count);
		printops(title, "get_tag", tag, nnodes, &stv, &etv);
	}

	/* phase: remove every node ... */
	gettimeofday(&stv, NULL);
	for (i = 0; i < nnodes; i++) {
		n = &nodes[i];
		radix_tree_remove_node(t, n->idx);
	}
	gettimeofday(&etv, NULL);
	printops(title, "remove", 0, nnodes, &stv, &etv);

	/* ... phase: and insert them all back. */
	gettimeofday(&stv, NULL);
	for (i = 0; i < nnodes; i++) {
		n = &nodes[i];
		radix_tree_insert_node(t, n->idx, n);
	}
	gettimeofday(&etv, NULL);
	printops(title, "insert", 0, nnodes, &stv, &etv);

	/* phase: re-apply the tags from the recorded per-node state. */
	for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
		ntagged[tag] = 0;
		gettimeofday(&stv, NULL);
		for (i = 0; i < nnodes; i++) {
			n = &nodes[i];
			if (n->tagged[tag]) {
				radix_tree_set_tag(t, n->idx, tag);
				ntagged[tag]++;
			}
		}
		gettimeofday(&etv, NULL);
		printops(title, "set_tag", tag, ntagged[tag], &stv, &etv);
	}

	/* phase: forward gang lookup over the whole tree. */
	gettimeofday(&stv, NULL);
	{
		struct testnode *results[TEST2_GANG_LOOKUP_NODES];
		uint64_t nextidx;
		unsigned int nfound;
		unsigned int total;

		nextidx = 0;
		total = 0;
		while ((nfound = radix_tree_gang_lookup_node(t, nextidx,
		    (void *)results, __arraycount(results))) > 0) {
			nextidx = results[nfound - 1]->idx + 1;
			total += nfound;
			/* nextidx wrapped: last batch ended at UINT64_MAX. */
			if (nextidx == 0) {
				break;
			}
		}
		assert(total == nnodes);
	}
	gettimeofday(&etv, NULL);
	printops(title, "ganglookup", 0, nnodes, &stv, &etv);

	/* phase: reverse gang lookup over the whole tree. */
	gettimeofday(&stv, NULL);
	{
		struct testnode *results[TEST2_GANG_LOOKUP_NODES];
		uint64_t nextidx;
		unsigned int nfound;
		unsigned int total;

		nextidx = UINT64_MAX;
		total = 0;
		while ((nfound = radix_tree_gang_lookup_node_reverse(t, nextidx,
		    (void *)results, __arraycount(results))) > 0) {
			nextidx = results[nfound - 1]->idx - 1;
			total += nfound;
			/* nextidx wrapped: last batch ended at index 0. */
			if (nextidx == UINT64_MAX) {
				break;
			}
		}
		assert(total == nnodes);
	}
	gettimeofday(&etv, NULL);
	printops(title, "ganglookup_reverse", 0, nnodes, &stv, &etv);

	/* phase: forward tagged gang lookup, once per tag. */
	for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
		gettimeofday(&stv, NULL);
		{
			struct testnode *results[TEST2_GANG_LOOKUP_NODES];
			uint64_t nextidx;
			unsigned int nfound;
			unsigned int total;

			nextidx = 0;
			total = 0;
			while ((nfound = radix_tree_gang_lookup_tagged_node(t,
			    nextidx, (void *)results, __arraycount(results),
			    tag)) > 0) {
				nextidx = results[nfound - 1]->idx + 1;
				total += nfound;
			}
			assert(total == ntagged[tag]);
		}
		gettimeofday(&etv, NULL);
		printops(title, "ganglookup_tag", tag, ntagged[tag], &stv,
		    &etv);
	}

	/* phase: reverse tagged gang lookup, once per tag. */
	for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
		gettimeofday(&stv, NULL);
		{
			struct testnode *results[TEST2_GANG_LOOKUP_NODES];
			uint64_t nextidx;
			unsigned int nfound;
			unsigned int total;

			nextidx = UINT64_MAX;
			total = 0;
			while ((nfound =
			    radix_tree_gang_lookup_tagged_node_reverse(t,
			    nextidx, (void *)results, __arraycount(results),
			    tag)) > 0) {
				nextidx = results[nfound - 1]->idx - 1;
				total += nfound;
				if (nextidx == UINT64_MAX) {
					break;
				}
			}
			assert(total == ntagged[tag]);
		}
		gettimeofday(&etv, NULL);
		printops(title, "ganglookup_tag_reverse", tag, ntagged[tag],
		    &stv, &etv);
	}

	/* phase: tagged gang lookup combined with removal, per tag. */
	removed = 0;
	for (tag = 0; tag < RADIX_TREE_TAG_ID_MAX; tag++) {
		unsigned int total;

		total = 0;
		gettimeofday(&stv, NULL);
		{
			struct testnode *results[TEST2_GANG_LOOKUP_NODES];
			uint64_t nextidx;
			unsigned int nfound;

			nextidx = 0;
			while ((nfound = radix_tree_gang_lookup_tagged_node(t,
			    nextidx, (void *)results, __arraycount(results),
			    tag)) > 0) {
				for (i = 0; i < nfound; i++) {
					radix_tree_remove_node(t,
					    results[i]->idx);
				}
				nextidx = results[nfound - 1]->idx + 1;
				total += nfound;
				if (nextidx == 0) {
					break;
				}
			}
			/*
			 * for tags after the first, nodes carrying several
			 * tags may already have been removed by an earlier
			 * iteration, so total can fall short of ntagged.
			 */
			assert(tag != 0 || total == ntagged[tag]);
			assert(total <= ntagged[tag]);
		}
		gettimeofday(&etv, NULL);
		printops(title, "ganglookup_tag+remove", tag, total, &stv,
		    &etv);
		removed += total;
	}

	/* phase: gang lookup + removal of whatever is left. */
	gettimeofday(&stv, NULL);
	{
		struct testnode *results[TEST2_GANG_LOOKUP_NODES];
		uint64_t nextidx;
		unsigned int nfound;
		unsigned int total;

		nextidx = 0;
		total = 0;
		while ((nfound = radix_tree_gang_lookup_node(t, nextidx,
		    (void *)results, __arraycount(results))) > 0) {
			for (i = 0; i < nfound; i++) {
				assert(results[i] == radix_tree_remove_node(t,
				    results[i]->idx));
			}
			nextidx = results[nfound - 1]->idx + 1;
			total += nfound;
			if (nextidx == 0) {
				break;
			}
		}
		assert(total == nnodes - removed);
	}
	gettimeofday(&etv, NULL);
	printops(title, "ganglookup+remove", 0, nnodes - removed, &stv, &etv);

	radix_tree_fini_tree(t);
	free(nodes);
}
1409 1.1 yamt
int
main(int argc, char *argv[])
{

	test1();
	test2("dense", true);	/* indices confined to [0, 2 * nnodes) */
	test2("sparse", false);	/* indices spread over the random() range */
	return 0;
}
1419 1.1 yamt
1420 1.1 yamt #endif /* defined(UNITTEST) */
1421