FreeBSD/Linux Kernel Cross Reference
sys/sys/tree.h
1 /* $NetBSD: tree.h,v 1.8 2004/03/28 19:38:30 provos Exp $ */
2 /* $OpenBSD: tree.h,v 1.7 2002/10/17 21:51:54 art Exp $ */
3 /* $FreeBSD$ */
4
5 /*-
6 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
7 *
8 * Copyright 2002 Niels Provos <provos@citi.umich.edu>
9 * All rights reserved.
10 *
11 * Redistribution and use in source and binary forms, with or without
12 * modification, are permitted provided that the following conditions
13 * are met:
14 * 1. Redistributions of source code must retain the above copyright
15 * notice, this list of conditions and the following disclaimer.
16 * 2. Redistributions in binary form must reproduce the above copyright
17 * notice, this list of conditions and the following disclaimer in the
18 * documentation and/or other materials provided with the distribution.
19 *
20 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
21 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
22 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
23 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
24 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
25 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
26 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
27 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
28 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
29 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 */
31
32 #ifndef _SYS_TREE_H_
33 #define _SYS_TREE_H_
34
35 #include <sys/cdefs.h>
36
37 /*
38 * This file defines data structures for different types of trees:
39 * splay trees and rank-balanced trees.
40 *
41 * A splay tree is a self-organizing data structure. Every operation
42 * on the tree causes a splay to happen. The splay moves the requested
43 * node to the root of the tree and partly rebalances it.
44 *
45 * This has the benefit that request locality causes faster lookups as
46 * the requested nodes move to the top of the tree. On the other hand,
47 * every lookup causes memory writes.
48 *
49 * The Balance Theorem bounds the total access time for m operations
50 * and n inserts on an initially empty tree as O((m + n)lg n). The
51 * amortized cost for a sequence of m accesses to a splay tree is O(lg n);
52 *
53 * A rank-balanced tree is a binary search tree with an integer
54 * rank-difference as an attribute of each pointer from parent to child.
55 * The sum of the rank-differences on any path from a node down to null is
56 * the same, and defines the rank of that node. The rank of the null node
57 * is -1.
58 *
59 * Different additional conditions define different sorts of balanced trees,
60 * including "red-black" and "AVL" trees. The set of conditions applied here
61 * are the "weak-AVL" conditions of Haeupler, Sen and Tarjan presented in
62 * "Rank Balanced Trees", ACM Transactions on Algorithms Volume 11 Issue 4 June
63 * 2015 Article No.: 30pp 1–26 https://doi.org/10.1145/2689412 (the HST paper):
64 * - every rank-difference is 1 or 2.
65 * - the rank of any leaf is 1.
66 *
67 * For historical reasons, rank differences that are even are associated
68 * with the color red (Rank-Even-Difference), and the child that a red edge
69 * points to is called a red child.
70 *
71 * Every operation on a rank-balanced tree is bounded as O(lg n).
72 * The maximum height of a rank-balanced tree is 2lg (n+1).
73 */
74
/*
 * Splay tree building blocks: SPLAY_HEAD declares the head structure
 * holding the root pointer, SPLAY_ENTRY declares the per-node linkage,
 * and the remaining macros are accessors and splay-step helpers.
 */
75 #define SPLAY_HEAD(name, type) \
76 struct name { \
77 struct type *sph_root; /* root of the tree */ \
78 }
79
80 #define SPLAY_INITIALIZER(root) \
81 { NULL }
82
83 #define SPLAY_INIT(root) do { \
84 (root)->sph_root = NULL; \
85 } while (/*CONSTCOND*/ 0)
86
87 #define SPLAY_ENTRY(type) \
88 struct { \
89 struct type *spe_left; /* left element */ \
90 struct type *spe_right; /* right element */ \
91 }
92
/* Accessors for node links and the tree root. */
93 #define SPLAY_LEFT(elm, field) (elm)->field.spe_left
94 #define SPLAY_RIGHT(elm, field) (elm)->field.spe_right
95 #define SPLAY_ROOT(head) (head)->sph_root
96 #define SPLAY_EMPTY(head) (SPLAY_ROOT(head) == NULL)
97
98 /* SPLAY_ROTATE_{LEFT,RIGHT} expect that tmp hold SPLAY_{RIGHT,LEFT} */
99 #define SPLAY_ROTATE_RIGHT(head, tmp, field) do { \
100 SPLAY_LEFT((head)->sph_root, field) = SPLAY_RIGHT(tmp, field); \
101 SPLAY_RIGHT(tmp, field) = (head)->sph_root; \
102 (head)->sph_root = tmp; \
103 } while (/*CONSTCOND*/ 0)
104
105 #define SPLAY_ROTATE_LEFT(head, tmp, field) do { \
106 SPLAY_RIGHT((head)->sph_root, field) = SPLAY_LEFT(tmp, field); \
107 SPLAY_LEFT(tmp, field) = (head)->sph_root; \
108 (head)->sph_root = tmp; \
109 } while (/*CONSTCOND*/ 0)
110
/*
 * SPLAY_LINK{LEFT,RIGHT} hang the current root under tmp's left/right
 * link and descend one level; used to build the side trees of a splay.
 */
111 #define SPLAY_LINKLEFT(head, tmp, field) do { \
112 SPLAY_LEFT(tmp, field) = (head)->sph_root; \
113 tmp = (head)->sph_root; \
114 (head)->sph_root = SPLAY_LEFT((head)->sph_root, field); \
115 } while (/*CONSTCOND*/ 0)
116
117 #define SPLAY_LINKRIGHT(head, tmp, field) do { \
118 SPLAY_RIGHT(tmp, field) = (head)->sph_root; \
119 tmp = (head)->sph_root; \
120 (head)->sph_root = SPLAY_RIGHT((head)->sph_root, field); \
121 } while (/*CONSTCOND*/ 0)
122
/* Reattach the accumulated left/right side trees after a splay. */
123 #define SPLAY_ASSEMBLE(head, node, left, right, field) do { \
124 SPLAY_RIGHT(left, field) = SPLAY_LEFT((head)->sph_root, field); \
125 SPLAY_LEFT(right, field) = SPLAY_RIGHT((head)->sph_root, field);\
126 SPLAY_LEFT((head)->sph_root, field) = SPLAY_RIGHT(node, field); \
127 SPLAY_RIGHT((head)->sph_root, field) = SPLAY_LEFT(node, field); \
128 } while (/*CONSTCOND*/ 0)
130 /* Generates prototypes and inline functions */
131
/*
 * SPLAY_PROTOTYPE emits prototypes for the out-of-line splay functions
 * (generated by SPLAY_GENERATE) plus small inline wrappers: FIND looks
 * up a key, NEXT returns the in-order successor, and MIN_MAX splays the
 * minimum or maximum element to the root and returns it.
 */
132 #define SPLAY_PROTOTYPE(name, type, field, cmp) \
133 void name##_SPLAY(struct name *, struct type *); \
134 void name##_SPLAY_MINMAX(struct name *, int); \
135 struct type *name##_SPLAY_INSERT(struct name *, struct type *); \
136 struct type *name##_SPLAY_REMOVE(struct name *, struct type *); \
137 \
138 /* Finds the node with the same key as elm */ \
139 static __unused __inline struct type * \
140 name##_SPLAY_FIND(struct name *head, struct type *elm) \
141 { \
142 if (SPLAY_EMPTY(head)) \
143 return(NULL); \
144 name##_SPLAY(head, elm); \
145 if ((cmp)(elm, (head)->sph_root) == 0) \
146 return (head->sph_root); \
147 return (NULL); \
148 } \
149 \
150 static __unused __inline struct type * \
151 name##_SPLAY_NEXT(struct name *head, struct type *elm) \
152 { \
153 name##_SPLAY(head, elm); \
154 if (SPLAY_RIGHT(elm, field) != NULL) { \
155 elm = SPLAY_RIGHT(elm, field); \
156 while (SPLAY_LEFT(elm, field) != NULL) { \
157 elm = SPLAY_LEFT(elm, field); \
158 } \
159 } else \
160 elm = NULL; \
161 return (elm); \
162 } \
163 \
164 static __unused __inline struct type * \
165 name##_SPLAY_MIN_MAX(struct name *head, int val) \
166 { \
167 name##_SPLAY_MINMAX(head, val); \
168 return (SPLAY_ROOT(head)); \
169 }
170
171 /* Main splay operation.
172 * Moves node close to the key of elm to top
173 */
174 #define SPLAY_GENERATE(name, type, field, cmp) \
175 struct type * \
176 name##_SPLAY_INSERT(struct name *head, struct type *elm) \
177 { \
178 if (SPLAY_EMPTY(head)) { \
179 SPLAY_LEFT(elm, field) = SPLAY_RIGHT(elm, field) = NULL; \
180 } else { \
181 __typeof(cmp(NULL, NULL)) __comp; \
182 name##_SPLAY(head, elm); \
183 __comp = (cmp)(elm, (head)->sph_root); \
184 if (__comp < 0) { \
185 SPLAY_LEFT(elm, field) = SPLAY_LEFT((head)->sph_root, field);\
186 SPLAY_RIGHT(elm, field) = (head)->sph_root; \
187 SPLAY_LEFT((head)->sph_root, field) = NULL; \
188 } else if (__comp > 0) { \
189 SPLAY_RIGHT(elm, field) = SPLAY_RIGHT((head)->sph_root, field);\
190 SPLAY_LEFT(elm, field) = (head)->sph_root; \
191 SPLAY_RIGHT((head)->sph_root, field) = NULL; \
192 } else \
193 return ((head)->sph_root); \
194 } \
195 (head)->sph_root = (elm); \
196 return (NULL); \
197 } \
198 \
199 struct type * \
200 name##_SPLAY_REMOVE(struct name *head, struct type *elm) \
201 { \
202 struct type *__tmp; \
203 if (SPLAY_EMPTY(head)) \
204 return (NULL); \
205 name##_SPLAY(head, elm); \
206 if ((cmp)(elm, (head)->sph_root) == 0) { \
207 if (SPLAY_LEFT((head)->sph_root, field) == NULL) { \
208 (head)->sph_root = SPLAY_RIGHT((head)->sph_root, field);\
209 } else { \
210 __tmp = SPLAY_RIGHT((head)->sph_root, field); \
211 (head)->sph_root = SPLAY_LEFT((head)->sph_root, field);\
212 name##_SPLAY(head, elm); \
213 SPLAY_RIGHT((head)->sph_root, field) = __tmp; \
214 } \
215 return (elm); \
216 } \
217 return (NULL); \
218 } \
219 \
220 void \
221 name##_SPLAY(struct name *head, struct type *elm) \
222 { \
223 struct type __node, *__left, *__right, *__tmp; \
224 __typeof(cmp(NULL, NULL)) __comp; \
225 \
226 SPLAY_LEFT(&__node, field) = SPLAY_RIGHT(&__node, field) = NULL;\
227 __left = __right = &__node; \
228 \
229 while ((__comp = (cmp)(elm, (head)->sph_root)) != 0) { \
230 if (__comp < 0) { \
231 __tmp = SPLAY_LEFT((head)->sph_root, field); \
232 if (__tmp == NULL) \
233 break; \
234 if ((cmp)(elm, __tmp) < 0){ \
235 SPLAY_ROTATE_RIGHT(head, __tmp, field); \
236 if (SPLAY_LEFT((head)->sph_root, field) == NULL)\
237 break; \
238 } \
239 SPLAY_LINKLEFT(head, __right, field); \
240 } else if (__comp > 0) { \
241 __tmp = SPLAY_RIGHT((head)->sph_root, field); \
242 if (__tmp == NULL) \
243 break; \
244 if ((cmp)(elm, __tmp) > 0){ \
245 SPLAY_ROTATE_LEFT(head, __tmp, field); \
246 if (SPLAY_RIGHT((head)->sph_root, field) == NULL)\
247 break; \
248 } \
249 SPLAY_LINKRIGHT(head, __left, field); \
250 } \
251 } \
252 SPLAY_ASSEMBLE(head, &__node, __left, __right, field); \
253 } \
254 \
255 /* Splay with either the minimum or the maximum element \
256 * Used to find minimum or maximum element in tree. \
257 */ \
258 void name##_SPLAY_MINMAX(struct name *head, int __comp) \
259 { \
260 struct type __node, *__left, *__right, *__tmp; \
261 \
262 SPLAY_LEFT(&__node, field) = SPLAY_RIGHT(&__node, field) = NULL;\
263 __left = __right = &__node; \
264 \
265 while (1) { \
266 if (__comp < 0) { \
267 __tmp = SPLAY_LEFT((head)->sph_root, field); \
268 if (__tmp == NULL) \
269 break; \
270 if (__comp < 0){ /* always true here; kept parallel to name##_SPLAY */ \
271 SPLAY_ROTATE_RIGHT(head, __tmp, field); \
272 if (SPLAY_LEFT((head)->sph_root, field) == NULL)\
273 break; \
274 } \
275 SPLAY_LINKLEFT(head, __right, field); \
276 } else if (__comp > 0) { \
277 __tmp = SPLAY_RIGHT((head)->sph_root, field); \
278 if (__tmp == NULL) \
279 break; \
280 if (__comp > 0) { /* always true here; kept parallel to name##_SPLAY */ \
281 SPLAY_ROTATE_LEFT(head, __tmp, field); \
282 if (SPLAY_RIGHT((head)->sph_root, field) == NULL)\
283 break; \
284 } \
285 SPLAY_LINKRIGHT(head, __left, field); \
286 } \
287 } \
288 SPLAY_ASSEMBLE(head, &__node, __left, __right, field); \
289 }
290
/* Arguments for name##_SPLAY_MINMAX: splay leftmost (-1) or rightmost (1). */
291 #define SPLAY_NEGINF -1
292 #define SPLAY_INF 1
293
/* User-facing wrappers that dispatch to the name##_* generated functions. */
294 #define SPLAY_INSERT(name, x, y) name##_SPLAY_INSERT(x, y)
295 #define SPLAY_REMOVE(name, x, y) name##_SPLAY_REMOVE(x, y)
296 #define SPLAY_FIND(name, x, y) name##_SPLAY_FIND(x, y)
297 #define SPLAY_NEXT(name, x, y) name##_SPLAY_NEXT(x, y)
298 #define SPLAY_MIN(name, x) (SPLAY_EMPTY(x) ? NULL \
299 : name##_SPLAY_MIN_MAX(x, SPLAY_NEGINF))
300 #define SPLAY_MAX(name, x) (SPLAY_EMPTY(x) ? NULL \
301 : name##_SPLAY_MIN_MAX(x, SPLAY_INF))
302
/* In-order traversal; NOTE(review): each step splays, so it writes the tree. */
303 #define SPLAY_FOREACH(x, name, head) \
304 for ((x) = SPLAY_MIN(name, head); \
305 (x) != NULL; \
306 (x) = SPLAY_NEXT(name, head, x))
307
308 /* Macros that define a rank-balanced tree */
309 #define RB_HEAD(name, type) \
310 struct name { \
311 struct type *rbh_root; /* root of the tree */ \
312 }
313
314 #define RB_INITIALIZER(root) \
315 { NULL }
316
317 #define RB_INIT(root) do { \
318 (root)->rbh_root = NULL; \
319 } while (/*CONSTCOND*/ 0)
320
/* Per-node linkage: rbe_link[0] = parent (with color bits), [1] = left, [2] = right. */
321 #define RB_ENTRY(type) \
322 struct { \
323 struct type *rbe_link[3]; \
324 }
325
326 /*
327 * With the expectation that any object of struct type has an
328 * address that is a multiple of 4, and that therefore the
329 * 2 least significant bits of a pointer to struct type are
330 * always zero, this implementation sets those bits to indicate
331 * that the left or right child of the tree node is "red".
332 */
333 #define _RB_LINK(elm, dir, field) (elm)->field.rbe_link[dir]
334 #define _RB_UP(elm, field) _RB_LINK(elm, 0, field)
/* Bit masks stored in the low bits of the parent pointer. */
335 #define _RB_L ((__uintptr_t)1)
336 #define _RB_R ((__uintptr_t)2)
337 #define _RB_LR ((__uintptr_t)3)
/* Reinterpret a pointer lvalue as an integer lvalue to reach the tag bits. */
338 #define _RB_BITS(elm) (*(__uintptr_t *)&elm)
339 #define _RB_BITSUP(elm, field) _RB_BITS(_RB_UP(elm, field))
/* Strip the tag bits to recover the plain pointer value. */
340 #define _RB_PTR(elm) (__typeof(elm)) \
341 ((__uintptr_t)elm & ~_RB_LR)
342
343 #define RB_PARENT(elm, field) _RB_PTR(_RB_UP(elm, field))
344 #define RB_LEFT(elm, field) _RB_LINK(elm, _RB_L, field)
345 #define RB_RIGHT(elm, field) _RB_LINK(elm, _RB_R, field)
346 #define RB_ROOT(head) (head)->rbh_root
347 #define RB_EMPTY(head) (RB_ROOT(head) == NULL)
348
/* Replace the parent pointer of dst while preserving its color bits. */
349 #define RB_SET_PARENT(dst, src, field) do { \
350 _RB_BITSUP(dst, field) = (__uintptr_t)src | \
351 (_RB_BITSUP(dst, field) & _RB_LR); \
352 } while (/*CONSTCOND*/ 0)
353
/* Initialize elm as a leaf child of parent (both edge bits clear). */
354 #define RB_SET(elm, parent, field) do { \
355 _RB_UP(elm, field) = parent; \
356 RB_LEFT(elm, field) = RB_RIGHT(elm, field) = NULL; \
357 } while (/*CONSTCOND*/ 0)
358
359 /*
360 * Either RB_AUGMENT or RB_AUGMENT_CHECK is invoked in a loop at the root of
361 * every modified subtree, from the bottom up to the root, to update augmented
362 * node data. RB_AUGMENT_CHECK returns true only when the update changes the
363 * node data, so that updating can be stopped short of the root when it returns
364 * false.
365 */
366 #ifndef RB_AUGMENT_CHECK
367 #ifndef RB_AUGMENT
368 #define RB_AUGMENT_CHECK(x) 0
369 #else
370 #define RB_AUGMENT_CHECK(x) (RB_AUGMENT(x), 1)
371 #endif
372 #endif
373
/* Propagate augmentation data from elm toward the root, stopping early
 * as soon as RB_AUGMENT_CHECK reports no change. */
374 #define RB_UPDATE_AUGMENT(elm, field) do { \
375 __typeof(elm) rb_update_tmp = (elm); \
376 while (RB_AUGMENT_CHECK(rb_update_tmp) && \
377 (rb_update_tmp = RB_PARENT(rb_update_tmp, field)) != NULL) \
378 ; \
379 } while (0)
380
/* Redirect the link from par (or the tree root, if par is NULL) that
 * pointed at 'out' so that it points at 'in' instead. */
381 #define RB_SWAP_CHILD(head, par, out, in, field) do { \
382 if (par == NULL) \
383 RB_ROOT(head) = (in); \
384 else if ((out) == RB_LEFT(par, field)) \
385 RB_LEFT(par, field) = (in); \
386 else \
387 RB_RIGHT(par, field) = (in); \
388 } while (/*CONSTCOND*/ 0)
389
390 /*
391 * RB_ROTATE macro partially restructures the tree to improve balance. In the
392 * case when dir is _RB_L, tmp is a right child of elm. After rotation, elm
393 * is a left child of tmp, and the subtree that represented the items between
394 * them, which formerly hung to the left of tmp now hangs to the right of elm.
395 * The parent-child relationship between elm and its former parent is not
396 * changed; where this macro once updated those fields, that is now left to the
397 * caller of RB_ROTATE to clean up, so that a pair of rotations does not twice
398 * update the same pair of pointer fields with distinct values.
399 */
400 #define RB_ROTATE(elm, tmp, dir, field) do { \
401 if ((_RB_LINK(elm, dir ^ _RB_LR, field) = \
402 _RB_LINK(tmp, dir, field)) != NULL) \
403 RB_SET_PARENT(_RB_LINK(tmp, dir, field), elm, field); \
404 _RB_LINK(tmp, dir, field) = (elm); \
405 RB_SET_PARENT(elm, tmp, field); \
406 } while (/*CONSTCOND*/ 0)
407
408 /* Generates prototypes and inline functions */
/* RB_PROTOTYPE declares the full function set; the _STATIC variant marks
 * them static (and __unused) for single-translation-unit use. */
409 #define RB_PROTOTYPE(name, type, field, cmp) \
410 RB_PROTOTYPE_INTERNAL(name, type, field, cmp,)
411 #define RB_PROTOTYPE_STATIC(name, type, field, cmp) \
412 RB_PROTOTYPE_INTERNAL(name, type, field, cmp, __unused static)
413 #define RB_PROTOTYPE_INTERNAL(name, type, field, cmp, attr) \
414 RB_PROTOTYPE_RANK(name, type, attr) \
415 RB_PROTOTYPE_INSERT_COLOR(name, type, attr); \
416 RB_PROTOTYPE_REMOVE_COLOR(name, type, attr); \
417 RB_PROTOTYPE_INSERT_FINISH(name, type, attr); \
418 RB_PROTOTYPE_INSERT(name, type, attr); \
419 RB_PROTOTYPE_REMOVE(name, type, attr); \
420 RB_PROTOTYPE_FIND(name, type, attr); \
421 RB_PROTOTYPE_NFIND(name, type, attr); \
422 RB_PROTOTYPE_NEXT(name, type, attr); \
423 RB_PROTOTYPE_INSERT_NEXT(name, type, attr); \
424 RB_PROTOTYPE_PREV(name, type, attr); \
425 RB_PROTOTYPE_INSERT_PREV(name, type, attr); \
426 RB_PROTOTYPE_MINMAX(name, type, attr); \
427 RB_PROTOTYPE_REINSERT(name, type, attr);
428 #ifdef _RB_DIAGNOSTIC
429 #define RB_PROTOTYPE_RANK(name, type, attr) \
430 attr int name##_RB_RANK(struct type *);
431 #else
432 #define RB_PROTOTYPE_RANK(name, type, attr)
433 #endif
434 #define RB_PROTOTYPE_INSERT_COLOR(name, type, attr) \
435 attr struct type *name##_RB_INSERT_COLOR(struct name *, \
436 struct type *, struct type *)
437 #define RB_PROTOTYPE_REMOVE_COLOR(name, type, attr) \
438 attr struct type *name##_RB_REMOVE_COLOR(struct name *, \
439 struct type *, struct type *)
440 #define RB_PROTOTYPE_REMOVE(name, type, attr) \
441 attr struct type *name##_RB_REMOVE(struct name *, struct type *)
442 #define RB_PROTOTYPE_INSERT_FINISH(name, type, attr) \
443 attr struct type *name##_RB_INSERT_FINISH(struct name *, \
444 struct type *, struct type **, struct type *)
445 #define RB_PROTOTYPE_INSERT(name, type, attr) \
446 attr struct type *name##_RB_INSERT(struct name *, struct type *)
447 #define RB_PROTOTYPE_FIND(name, type, attr) \
448 attr struct type *name##_RB_FIND(struct name *, struct type *)
449 #define RB_PROTOTYPE_NFIND(name, type, attr) \
450 attr struct type *name##_RB_NFIND(struct name *, struct type *)
451 #define RB_PROTOTYPE_NEXT(name, type, attr) \
452 attr struct type *name##_RB_NEXT(struct type *)
453 #define RB_PROTOTYPE_INSERT_NEXT(name, type, attr) \
454 attr struct type *name##_RB_INSERT_NEXT(struct name *, \
455 struct type *, struct type *)
456 #define RB_PROTOTYPE_PREV(name, type, attr) \
457 attr struct type *name##_RB_PREV(struct type *)
458 #define RB_PROTOTYPE_INSERT_PREV(name, type, attr) \
459 attr struct type *name##_RB_INSERT_PREV(struct name *, \
460 struct type *, struct type *)
461 #define RB_PROTOTYPE_MINMAX(name, type, attr) \
462 attr struct type *name##_RB_MINMAX(struct name *, int)
463 #define RB_PROTOTYPE_REINSERT(name, type, attr) \
464 attr struct type *name##_RB_REINSERT(struct name *, struct type *)
465
466 /* Generate definitions for the full set of rank-balanced tree
467 * functions; RB_GENERATE_STATIC marks them static (and __unused).
468 */
469 #define RB_GENERATE(name, type, field, cmp) \
470 RB_GENERATE_INTERNAL(name, type, field, cmp,)
471 #define RB_GENERATE_STATIC(name, type, field, cmp) \
472 RB_GENERATE_INTERNAL(name, type, field, cmp, __unused static)
473 #define RB_GENERATE_INTERNAL(name, type, field, cmp, attr) \
474 RB_GENERATE_RANK(name, type, field, attr) \
475 RB_GENERATE_INSERT_COLOR(name, type, field, attr) \
476 RB_GENERATE_REMOVE_COLOR(name, type, field, attr) \
477 RB_GENERATE_INSERT_FINISH(name, type, field, attr) \
478 RB_GENERATE_INSERT(name, type, field, cmp, attr) \
479 RB_GENERATE_REMOVE(name, type, field, attr) \
480 RB_GENERATE_FIND(name, type, field, cmp, attr) \
481 RB_GENERATE_NFIND(name, type, field, cmp, attr) \
482 RB_GENERATE_NEXT(name, type, field, attr) \
483 RB_GENERATE_INSERT_NEXT(name, type, field, cmp, attr) \
484 RB_GENERATE_PREV(name, type, field, attr) \
485 RB_GENERATE_INSERT_PREV(name, type, field, cmp, attr) \
486 RB_GENERATE_MINMAX(name, type, field, attr) \
487 RB_GENERATE_REINSERT(name, type, field, cmp, attr)
488
/*
 * Diagnostic-only rank verification.  name##_RB_RANK recursively checks
 * the weak-AVL invariants (every rank difference 1 or 2; every leaf has
 * rank 1) and optionally re-verifies augmentation data.
 * Fix: the comment line "498" inside the macro was missing its trailing
 * line-continuation backslash; restored so every line of the macro body
 * is explicitly continued, matching the rest of the definition.
 */
489 #ifdef _RB_DIAGNOSTIC
490 #ifndef RB_AUGMENT
491 #define _RB_AUGMENT_VERIFY(x) RB_AUGMENT_CHECK(x)
492 #else
493 #define _RB_AUGMENT_VERIFY(x) 0
494 #endif
495 #define RB_GENERATE_RANK(name, type, field, attr) \
496 /* \
497 * Return the rank of the subtree rooted at elm, or -1 if the subtree \
498 * is not rank-balanced, or has inconsistent augmentation data. \
499 */ \
500 attr int \
501 name##_RB_RANK(struct type *elm) \
502 { \
503 struct type *left, *right, *up; \
504 int left_rank, right_rank; \
505 \
506 if (elm == NULL) \
507 return (0); \
508 up = _RB_UP(elm, field); \
509 left = RB_LEFT(elm, field); \
510 left_rank = ((_RB_BITS(up) & _RB_L) ? 2 : 1) + \
511 name##_RB_RANK(left); \
512 right = RB_RIGHT(elm, field); \
513 right_rank = ((_RB_BITS(up) & _RB_R) ? 2 : 1) + \
514 name##_RB_RANK(right); \
515 if (left_rank != right_rank || \
516 (left_rank == 2 && left == NULL && right == NULL) || \
517 _RB_AUGMENT_VERIFY(elm)) \
518 return (-1); \
519 return (left_rank); \
520 }
521 #else
522 #define RB_GENERATE_RANK(name, type, field, attr)
523 #endif
524
/*
 * Rebalance after inserting leaf 'elm' under 'parent'.  Walks upward,
 * adjusting edge-length (color) bits and rotating at most twice; returns
 * the subtree root whose augmentation must still be propagated, or NULL.
 */
525 #define RB_GENERATE_INSERT_COLOR(name, type, field, attr) \
526 attr struct type * \
527 name##_RB_INSERT_COLOR(struct name *head, \
528 struct type *parent, struct type *elm) \
529 { \
530 /* \
531 * Initially, elm is a leaf. Either its parent was previously \
532 * a leaf, with two black null children, or an interior node \
533 * with a black non-null child and a red null child. The \
534 * balance criterion "the rank of any leaf is 1" precludes the \
535 * possibility of two red null children for the initial parent. \
536 * So the first loop iteration cannot lead to accessing an \
537 * uninitialized 'child', and a later iteration can only happen \
538 * when a value has been assigned to 'child' in the previous \
539 * one. \
540 */ \
541 struct type *child, *child_up, *gpar; \
542 __uintptr_t elmdir, sibdir; \
543 \
544 do { \
545 /* the rank of the tree rooted at elm grew */ \
546 gpar = _RB_UP(parent, field); \
547 elmdir = RB_RIGHT(parent, field) == elm ? _RB_R : _RB_L; \
548 if (_RB_BITS(gpar) & elmdir) { \
549 /* shorten the parent-elm edge to rebalance */ \
550 _RB_BITSUP(parent, field) ^= elmdir; \
551 return (NULL); \
552 } \
553 sibdir = elmdir ^ _RB_LR; \
554 /* the other edge must change length */ \
555 _RB_BITSUP(parent, field) ^= sibdir; \
556 if ((_RB_BITS(gpar) & _RB_LR) == 0) { \
557 /* both edges now short, retry from parent */ \
558 child = elm; \
559 elm = parent; \
560 continue; \
561 } \
562 _RB_UP(parent, field) = gpar = _RB_PTR(gpar); \
563 if (_RB_BITSUP(elm, field) & elmdir) { \
564 /* \
565 * Exactly one of the edges descending from elm \
566 * is long. The long one is in the same \
567 * direction as the edge from parent to elm, \
568 * so change that by rotation. The edge from \
569 * parent to z was shortened above. Shorten \
570 * the long edge down from elm, and adjust \
571 * other edge lengths based on the downward \
572 * edges from 'child'. \
573 * \
574 * par par \
575 * / \ / \ \
576 * elm z / z \
577 * / \ child \
578 * / child / \ \
579 * / / \ elm \ \
580 * w / \ / \ y \
581 * x y w \ \
582 * x \
583 */ \
584 RB_ROTATE(elm, child, elmdir, field); \
585 child_up = _RB_UP(child, field); \
586 if (_RB_BITS(child_up) & sibdir) \
587 _RB_BITSUP(parent, field) ^= elmdir; \
588 if (_RB_BITS(child_up) & elmdir) \
589 _RB_BITSUP(elm, field) ^= _RB_LR; \
590 else \
591 _RB_BITSUP(elm, field) ^= elmdir; \
592 /* if child is a leaf, don't augment elm, \
593 * since it is restored to be a leaf again. */ \
594 if ((_RB_BITS(child_up) & _RB_LR) == 0) \
595 elm = child; \
596 } else \
597 child = elm; \
598 \
599 /* \
600 * The long edge descending from 'child' points back \
601 * in the direction of 'parent'. Rotate to make \
602 * 'parent' a child of 'child', then make both edges \
603 * of 'child' short to rebalance. \
604 * \
605 * par child \
606 * / \ / \ \
607 * / z x par \
608 * child / \ \
609 * / \ / z \
610 * x \ y \
611 * y \
612 */ \
613 RB_ROTATE(parent, child, sibdir, field); \
614 _RB_UP(child, field) = gpar; \
615 RB_SWAP_CHILD(head, gpar, parent, child, field); \
616 /* \
617 * Elements rotated down have new, smaller subtrees, \
618 * so update augmentation for them. \
619 */ \
620 if (elm != child) \
621 (void)RB_AUGMENT_CHECK(elm); \
622 (void)RB_AUGMENT_CHECK(parent); \
623 return (child); \
624 } while ((parent = gpar) != NULL); \
625 return (NULL); \
626 }
627
628 #ifndef RB_STRICT_HST
629 /*
630 * In REMOVE_COLOR, the HST paper, in figure 3, in the single-rotate case, has
631 * 'parent' with one higher rank, and then reduces its rank if 'parent' has
632 * become a leaf. This implementation always has the parent in its new position
633 * with lower rank, to avoid the leaf check. Define RB_STRICT_HST to 1 to get
634 * the behavior that HST describes.
635 */
/* Default: use the leaf-check-free variant (see comment above). */
636 #define RB_STRICT_HST 0
637 #endif
638
/*
 * Rebalance after removing a node left 'parent' with child 'elm' (which
 * may be NULL).  Walks upward, adjusting edge-length bits and rotating
 * at most twice; returns the rotated-down element whose augmentation
 * still needs updating, or NULL.
 */
639 #define RB_GENERATE_REMOVE_COLOR(name, type, field, attr) \
640 attr struct type * \
641 name##_RB_REMOVE_COLOR(struct name *head, \
642 struct type *parent, struct type *elm) \
643 { \
644 struct type *gpar, *sib, *up; \
645 __uintptr_t elmdir, sibdir; \
646 \
647 if (RB_RIGHT(parent, field) == elm && \
648 RB_LEFT(parent, field) == elm) { \
649 /* Deleting a leaf that is an only-child creates a \
650 * rank-2 leaf. Demote that leaf. */ \
651 _RB_UP(parent, field) = _RB_PTR(_RB_UP(parent, field)); \
652 elm = parent; \
653 if ((parent = _RB_UP(elm, field)) == NULL) \
654 return (NULL); \
655 } \
656 do { \
657 /* the rank of the tree rooted at elm shrank */ \
658 gpar = _RB_UP(parent, field); \
659 elmdir = RB_RIGHT(parent, field) == elm ? _RB_R : _RB_L; \
660 _RB_BITS(gpar) ^= elmdir; \
661 if (_RB_BITS(gpar) & elmdir) { \
662 /* lengthen the parent-elm edge to rebalance */ \
663 _RB_UP(parent, field) = gpar; \
664 return (NULL); \
665 } \
666 if (_RB_BITS(gpar) & _RB_LR) { \
667 /* shorten other edge, retry from parent */ \
668 _RB_BITS(gpar) ^= _RB_LR; \
669 _RB_UP(parent, field) = gpar; \
670 gpar = _RB_PTR(gpar); \
671 continue; \
672 } \
673 sibdir = elmdir ^ _RB_LR; \
674 sib = _RB_LINK(parent, sibdir, field); \
675 up = _RB_UP(sib, field); \
676 _RB_BITS(up) ^= _RB_LR; \
677 if ((_RB_BITS(up) & _RB_LR) == 0) { \
678 /* shorten edges descending from sib, retry */ \
679 _RB_UP(sib, field) = up; \
680 continue; \
681 } \
682 if ((_RB_BITS(up) & sibdir) == 0) { \
683 /* \
684 * The edge descending from 'sib' away from \
685 * 'parent' is long. The short edge descending \
686 * from 'sib' toward 'parent' points to 'elm*' \
687 * Rotate to make 'sib' a child of 'elm*' \
688 * then adjust the lengths of the edges \
689 * descending from 'sib' and 'elm*'. \
690 * \
691 * par par \
692 * / \ / \ \
693 * / sib elm \ \
694 * / / \ elm* \
695 * elm elm* \ / \ \
696 * / \ \ / \ \
697 * / \ z / \ \
698 * x y x sib \
699 * / \ \
700 * / z \
701 * y \
702 */ \
703 elm = _RB_LINK(sib, elmdir, field); \
704 /* elm is a 1-child. First rotate at elm. */ \
705 RB_ROTATE(sib, elm, sibdir, field); \
706 up = _RB_UP(elm, field); \
707 _RB_BITSUP(parent, field) ^= \
708 (_RB_BITS(up) & elmdir) ? _RB_LR : elmdir; \
709 _RB_BITSUP(sib, field) ^= \
710 (_RB_BITS(up) & sibdir) ? _RB_LR : sibdir; \
711 _RB_BITSUP(elm, field) |= _RB_LR; \
712 } else { \
713 if ((_RB_BITS(up) & elmdir) == 0 && \
714 RB_STRICT_HST && elm != NULL) { \
715 /* if parent does not become a leaf, \
716 do not demote parent yet. */ \
717 _RB_BITSUP(parent, field) ^= sibdir; \
718 _RB_BITSUP(sib, field) ^= _RB_LR; \
719 } else if ((_RB_BITS(up) & elmdir) == 0) { \
720 /* demote parent. */ \
721 _RB_BITSUP(parent, field) ^= elmdir; \
722 _RB_BITSUP(sib, field) ^= sibdir; \
723 } else \
724 _RB_BITSUP(sib, field) ^= sibdir; \
725 elm = sib; \
726 } \
727 \
728 /* \
729 * The edge descending from 'elm' away from 'parent' \
730 * is short. Rotate to make 'parent' a child of 'elm', \
731 * then lengthen the short edges descending from \
732 * 'parent' and 'elm' to rebalance. \
733 * \
734 * par elm \
735 * / \ / \ \
736 * e \ / \ \
737 * elm / \ \
738 * / \ par s \
739 * / \ / \ \
740 * / \ e \ \
741 * x s x \
742 */ \
743 RB_ROTATE(parent, elm, elmdir, field); \
744 RB_SET_PARENT(elm, gpar, field); \
745 RB_SWAP_CHILD(head, gpar, parent, elm, field); \
746 /* \
747 * An element rotated down, but not into the search \
748 * path has a new, smaller subtree, so update \
749 * augmentation for it. \
750 */ \
751 if (sib != elm) \
752 (void)RB_AUGMENT_CHECK(sib); \
753 return (parent); \
754 } while (elm = parent, (parent = gpar) != NULL); \
755 return (NULL); \
756 }
757
/* Walk from elm toward the root updating augmentation until it stops
 * changing; clear 'match' if the walk passes through it, so the caller
 * knows that node's augmentation was already handled. */
758 #define _RB_AUGMENT_WALK(elm, match, field) \
759 do { \
760 if (match == elm) \
761 match = NULL; \
762 } while (RB_AUGMENT_CHECK(elm) && \
763 (elm = RB_PARENT(elm, field)) != NULL)
764
/*
 * Unlink 'out' from the tree, splicing its in-order successor into its
 * place when it has two children, then rebalance via REMOVE_COLOR and
 * update augmentation along the affected path.  Returns 'out'.
 */
765 #define RB_GENERATE_REMOVE(name, type, field, attr) \
766 attr struct type * \
767 name##_RB_REMOVE(struct name *head, struct type *out) \
768 { \
769 struct type *child, *in, *opar, *parent; \
770 \
771 child = RB_LEFT(out, field); \
772 in = RB_RIGHT(out, field); \
773 opar = _RB_UP(out, field); \
774 if (in == NULL || child == NULL) { \
775 in = child = (in == NULL ? child : in); \
776 parent = opar = _RB_PTR(opar); \
777 } else { \
778 parent = in; \
779 while (RB_LEFT(in, field)) \
780 in = RB_LEFT(in, field); \
781 RB_SET_PARENT(child, in, field); \
782 RB_LEFT(in, field) = child; \
783 child = RB_RIGHT(in, field); \
784 if (parent != in) { \
785 RB_SET_PARENT(parent, in, field); \
786 RB_RIGHT(in, field) = parent; \
787 parent = RB_PARENT(in, field); \
788 RB_LEFT(parent, field) = child; \
789 } \
790 _RB_UP(in, field) = opar; \
791 opar = _RB_PTR(opar); \
792 } \
793 RB_SWAP_CHILD(head, opar, out, in, field); \
794 if (child != NULL) \
795 _RB_UP(child, field) = parent; \
796 if (parent != NULL) { \
797 opar = name##_RB_REMOVE_COLOR(head, parent, child); \
798 /* if rotation has made 'parent' the root of the same \
799 * subtree as before, don't re-augment it. */ \
800 if (parent == in && RB_LEFT(parent, field) == NULL) { \
801 opar = NULL; \
802 parent = RB_PARENT(parent, field); \
803 } \
804 _RB_AUGMENT_WALK(parent, opar, field); \
805 if (opar != NULL) { \
806 /* \
807 * Elements rotated into the search path have \
808 * changed subtrees, so update augmentation for \
809 * them if AUGMENT_WALK didn't. \
810 */ \
811 (void)RB_AUGMENT_CHECK(opar); \
812 (void)RB_AUGMENT_CHECK(RB_PARENT(opar, field)); \
813 } \
814 } \
815 return (out); \
816 }
817
/*
 * Link 'elm' into the slot *pptr under 'parent', rebalance via
 * INSERT_COLOR, and update augmentation.  Always returns NULL
 * (insertion succeeded; callers use non-NULL to mean "key exists").
 */
818 #define RB_GENERATE_INSERT_FINISH(name, type, field, attr) \
819 /* Inserts a node into the RB tree */ \
820 attr struct type * \
821 name##_RB_INSERT_FINISH(struct name *head, struct type *parent, \
822 struct type **pptr, struct type *elm) \
823 { \
824 struct type *tmp = NULL; \
825 \
826 RB_SET(elm, parent, field); \
827 *pptr = elm; \
828 if (parent != NULL) \
829 tmp = name##_RB_INSERT_COLOR(head, parent, elm); \
830 _RB_AUGMENT_WALK(elm, tmp, field); \
831 if (tmp != NULL) \
832 /* \
833 * An element rotated into the search path has a \
834 * changed subtree, so update augmentation for it if \
835 * AUGMENT_WALK didn't. \
836 */ \
837 (void)RB_AUGMENT_CHECK(tmp); \
838 return (NULL); \
839 }
840
/*
 * Search for elm's key from the root; on a match return the existing
 * node (no insert), otherwise insert at the found leaf position and
 * return NULL.
 */
841 #define RB_GENERATE_INSERT(name, type, field, cmp, attr) \
842 /* Inserts a node into the RB tree */ \
843 attr struct type * \
844 name##_RB_INSERT(struct name *head, struct type *elm) \
845 { \
846 struct type *tmp; \
847 struct type **tmpp = &RB_ROOT(head); \
848 struct type *parent = NULL; \
849 \
850 while ((tmp = *tmpp) != NULL) { \
851 parent = tmp; \
852 __typeof(cmp(NULL, NULL)) comp = (cmp)(elm, parent); \
853 if (comp < 0) \
854 tmpp = &RB_LEFT(parent, field); \
855 else if (comp > 0) \
856 tmpp = &RB_RIGHT(parent, field); \
857 else \
858 return (parent); \
859 } \
860 return (name##_RB_INSERT_FINISH(head, parent, tmpp, elm)); \
861 }
862
/* Standard BST search; returns the matching node or NULL. */
863 #define RB_GENERATE_FIND(name, type, field, cmp, attr) \
864 /* Finds the node with the same key as elm */ \
865 attr struct type * \
866 name##_RB_FIND(struct name *head, struct type *elm) \
867 { \
868 struct type *tmp = RB_ROOT(head); \
869 __typeof(cmp(NULL, NULL)) comp; \
870 while (tmp) { \
871 comp = cmp(elm, tmp); \
872 if (comp < 0) \
873 tmp = RB_LEFT(tmp, field); \
874 else if (comp > 0) \
875 tmp = RB_RIGHT(tmp, field); \
876 else \
877 return (tmp); \
878 } \
879 return (NULL); \
880 }
881
/* BST search that remembers the last node greater than the key, so the
 * smallest element >= elm can be returned when no exact match exists. */
882 #define RB_GENERATE_NFIND(name, type, field, cmp, attr) \
883 /* Finds the first node greater than or equal to the search key */ \
884 attr struct type * \
885 name##_RB_NFIND(struct name *head, struct type *elm) \
886 { \
887 struct type *tmp = RB_ROOT(head); \
888 struct type *res = NULL; \
889 __typeof(cmp(NULL, NULL)) comp; \
890 while (tmp) { \
891 comp = cmp(elm, tmp); \
892 if (comp < 0) { \
893 res = tmp; \
894 tmp = RB_LEFT(tmp, field); \
895 } \
896 else if (comp > 0) \
897 tmp = RB_RIGHT(tmp, field); \
898 else \
899 return (tmp); \
900 } \
901 return (res); \
902 }
903
#define RB_GENERATE_NEXT(name, type, field, attr)			\
/* ARGSUSED */								\
attr struct type *							\
name##_RB_NEXT(struct type *elm)					\
{									\
	struct type *succ;						\
									\
	if (RB_LEFT(elm, field) || RB_RIGHT(elm, field) || 1) {		\
		/* placeholder removed below */				\
	}								\
	if (RB_RIGHT(elm, field) != NULL) {				\
		/* Successor is the minimum of the right subtree. */	\
		succ = RB_RIGHT(elm, field);				\
		while (RB_LEFT(succ, field) != NULL)			\
			succ = RB_LEFT(succ, field);			\
		return (succ);						\
	}								\
	/* Otherwise climb until we leave a left child. */		\
	while (RB_PARENT(elm, field) != NULL &&				\
	    elm == RB_RIGHT(RB_PARENT(elm, field), field))		\
		elm = RB_PARENT(elm, field);				\
	return (RB_PARENT(elm, field));					\
}
921
/*
 * _RB_ORDER_CHECK(cmp, lo, hi): kernel-diagnostic-only assertion that
 * lo sorts strictly before hi under cmp; expands to nothing otherwise.
 * Used by the positional insert macros below to catch out-of-order
 * insertions.
 */
#if defined(_KERNEL) && defined(DIAGNOSTIC)
#define _RB_ORDER_CHECK(cmp, lo, hi) do {				\
	KASSERT((cmp)(lo, hi) < 0, ("out of order insertion"));		\
} while (0)
#else
#define _RB_ORDER_CHECK(cmp, lo, hi) do {} while (0)
#endif
929
#define RB_GENERATE_INSERT_NEXT(name, type, field, cmp, attr)		\
/*									\
 * Inserts `next' immediately after `elm' in the RB tree.  The		\
 * caller guarantees elm < next < successor(elm); diagnostic		\
 * builds verify that ordering.  Returns the result of the		\
 * rebalancing finish step.						\
 */									\
attr struct type *							\
name##_RB_INSERT_NEXT(struct name *head,				\
    struct type *elm, struct type *next)				\
{									\
	struct type *tmp;						\
	struct type **tmpp = &RB_RIGHT(elm, field);			\
									\
	_RB_ORDER_CHECK(cmp, elm, next);				\
	/* Compute the successor once instead of twice. */		\
	tmp = name##_RB_NEXT(elm);					\
	if (tmp != NULL)						\
		_RB_ORDER_CHECK(cmp, next, tmp);			\
	/* Attach at the leftmost slot of elm's right subtree. */	\
	while ((tmp = *tmpp) != NULL) {					\
		elm = tmp;						\
		tmpp = &RB_LEFT(elm, field);				\
	}								\
	return (name##_RB_INSERT_FINISH(head, elm, tmpp, next));	\
}
948
#define RB_GENERATE_PREV(name, type, field, attr)			\
/* ARGSUSED */								\
attr struct type *							\
name##_RB_PREV(struct type *elm)					\
{									\
	struct type *pred;						\
									\
	if (RB_LEFT(elm, field) != NULL) {				\
		/* Predecessor is the maximum of the left subtree. */	\
		pred = RB_LEFT(elm, field);				\
		while (RB_RIGHT(pred, field) != NULL)			\
			pred = RB_RIGHT(pred, field);			\
		return (pred);						\
	}								\
	/* Otherwise climb until we leave a right child. */		\
	while (RB_PARENT(elm, field) != NULL &&				\
	    elm == RB_LEFT(RB_PARENT(elm, field), field))		\
		elm = RB_PARENT(elm, field);				\
	return (RB_PARENT(elm, field));					\
}
966
#define RB_GENERATE_INSERT_PREV(name, type, field, cmp, attr)		\
/*									\
 * Inserts `prev' immediately before `elm' in the RB tree.  The		\
 * caller guarantees predecessor(elm) < prev < elm; diagnostic		\
 * builds verify that ordering.  Returns the result of the		\
 * rebalancing finish step.						\
 */									\
attr struct type *							\
name##_RB_INSERT_PREV(struct name *head,				\
    struct type *elm, struct type *prev)				\
{									\
	struct type *tmp;						\
	struct type **tmpp = &RB_LEFT(elm, field);			\
									\
	_RB_ORDER_CHECK(cmp, prev, elm);				\
	/* Compute the predecessor once instead of twice. */		\
	tmp = name##_RB_PREV(elm);					\
	if (tmp != NULL)						\
		_RB_ORDER_CHECK(cmp, tmp, prev);			\
	/* Attach at the rightmost slot of elm's left subtree. */	\
	while ((tmp = *tmpp) != NULL) {					\
		elm = tmp;						\
		tmpp = &RB_RIGHT(elm, field);				\
	}								\
	return (name##_RB_INSERT_FINISH(head, elm, tmpp, prev));	\
}
985
#define RB_GENERATE_MINMAX(name, type, field, attr)			\
/* Returns the tree's extreme node: minimum if val < 0, else maximum. */\
attr struct type *							\
name##_RB_MINMAX(struct name *head, int val)				\
{									\
	struct type *node = RB_ROOT(head);				\
	struct type *last = NULL;					\
									\
	/* Walk one spine: left for the min, right for the max. */	\
	while (node != NULL) {						\
		last = node;						\
		node = (val < 0) ? RB_LEFT(node, field) :		\
		    RB_RIGHT(node, field);				\
	}								\
	return (last);							\
}
1001
#define RB_GENERATE_REINSERT(name, type, field, cmp, attr)		\
/*									\
 * Re-establishes tree order after elm's key has changed.  If elm	\
 * still sorts strictly between its neighbors, does nothing and		\
 * returns NULL; otherwise removes and reinserts it, returning any	\
 * colliding node (as name##_RB_INSERT does) or NULL on success.	\
 */									\
attr struct type *							\
name##_RB_REINSERT(struct name *head, struct type *elm)			\
{									\
	struct type *cmpelm;						\
	if (((cmpelm = RB_PREV(name, head, elm)) != NULL &&		\
	    cmp(cmpelm, elm) >= 0) ||					\
	    ((cmpelm = RB_NEXT(name, head, elm)) != NULL &&		\
	    cmp(elm, cmpelm) >= 0)) {					\
		/* XXXLAS: Remove/insert is heavy handed. */		\
		RB_REMOVE(name, head, elm);				\
		return (RB_INSERT(name, head, elm));			\
	}								\
	return (NULL);							\
}
1017
/* Direction selectors for name##_RB_MINMAX(). */
#define RB_NEGINF	-1	/* descend left: find the minimum */
#define RB_INF	1	/* descend right: find the maximum */

/*
 * Front-end macros: expand to the per-tree functions emitted by the
 * RB_GENERATE_* macros above for tree type `name'.
 */
#define RB_INSERT(name, x, y)	name##_RB_INSERT(x, y)
#define RB_INSERT_NEXT(name, x, y, z)	name##_RB_INSERT_NEXT(x, y, z)
#define RB_INSERT_PREV(name, x, y, z)	name##_RB_INSERT_PREV(x, y, z)
#define RB_REMOVE(name, x, y)	name##_RB_REMOVE(x, y)
#define RB_FIND(name, x, y)	name##_RB_FIND(x, y)
#define RB_NFIND(name, x, y)	name##_RB_NFIND(x, y)
#define RB_NEXT(name, x, y)	name##_RB_NEXT(y)	/* x (head) unused */
#define RB_PREV(name, x, y)	name##_RB_PREV(y)	/* x (head) unused */
#define RB_MIN(name, x)	name##_RB_MINMAX(x, RB_NEGINF)
#define RB_MAX(name, x)	name##_RB_MINMAX(x, RB_INF)
#define RB_REINSERT(name, x, y)	name##_RB_REINSERT(x, y)
1032
/* Iterate over the tree in ascending order. */
#define RB_FOREACH(x, name, head)					\
	for ((x) = RB_MIN(name, head);					\
	     (x) != NULL;						\
	     (x) = name##_RB_NEXT(x))

/* Ascending iteration starting at y instead of the minimum. */
#define RB_FOREACH_FROM(x, name, y)					\
	for ((x) = (y);							\
	    ((x) != NULL) && ((y) = name##_RB_NEXT(x), (x) != NULL);	\
	     (x) = (y))

/* Ascending iteration safe against removal of x: y caches the successor. */
#define RB_FOREACH_SAFE(x, name, head, y)				\
	for ((x) = RB_MIN(name, head);					\
	    ((x) != NULL) && ((y) = name##_RB_NEXT(x), (x) != NULL);	\
	     (x) = (y))

/* Iterate over the tree in descending order. */
#define RB_FOREACH_REVERSE(x, name, head)				\
	for ((x) = RB_MAX(name, head);					\
	     (x) != NULL;						\
	     (x) = name##_RB_PREV(x))

/* Descending iteration starting at y instead of the maximum. */
#define RB_FOREACH_REVERSE_FROM(x, name, y)				\
	for ((x) = (y);							\
	    ((x) != NULL) && ((y) = name##_RB_PREV(x), (x) != NULL);	\
	     (x) = (y))

/* Descending iteration safe against removal of x: y caches the predecessor. */
#define RB_FOREACH_REVERSE_SAFE(x, name, head, y)			\
	for ((x) = RB_MAX(name, head);					\
	    ((x) != NULL) && ((y) = name##_RB_PREV(x), (x) != NULL);	\
	     (x) = (y))
1062
1063 #endif /* _SYS_TREE_H_ */
Cache object: f7f694ac061ac7c41a0b0ace0c754943
|