Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

rbtree: Add generic add and find helpers

I've always been bothered by the endless (fragile) boilerplate for
rbtree, and I recently wrote some rbtree helpers for objtool and
figured I should lift them into the kernel and use them more widely.

Provide:

partial-order; less() based:
- rb_add(): add a new entry to the rbtree
- rb_add_cached(): like rb_add(), but for a rb_root_cached

total-order; cmp() based:
- rb_find(): find an entry in an rbtree
- rb_find_add(): find an entry, and add if not found

- rb_find_first(): find the first (leftmost) matching entry
- rb_next_match(): continue from rb_find_first()
- rb_for_each(): iterate a sub-tree using the previous two

Inlining and constant propagation should see the compiler inline the
whole thing, including the various compare functions.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Reviewed-by: Michel Lespinasse <walken@google.com>
Acked-by: Davidlohr Bueso <dbueso@suse.de>

Authored by Peter Zijlstra (Intel); committed by Ingo Molnar.
Commit IDs shown on the page: 2d24dd57, 9fe1f127

+392 -63
+190
include/linux/rbtree.h
··· 158 158 rb_replace_node(victim, new, &root->rb_root); 159 159 } 160 160 161 + /* 162 + * The below helper functions use 2 operators with 3 different 163 + * calling conventions. The operators are related like: 164 + * 165 + * comp(a->key,b) < 0 := less(a,b) 166 + * comp(a->key,b) > 0 := less(b,a) 167 + * comp(a->key,b) == 0 := !less(a,b) && !less(b,a) 168 + * 169 + * If these operators define a partial order on the elements we make no 170 + * guarantee on which of the elements matching the key is found. See 171 + * rb_find(). 172 + * 173 + * The reason for this is to allow the find() interface without requiring an 174 + * on-stack dummy object, which might not be feasible due to object size. 175 + */ 176 + 177 + /** 178 + * rb_add_cached() - insert @node into the leftmost cached tree @tree 179 + * @node: node to insert 180 + * @tree: leftmost cached tree to insert @node into 181 + * @less: operator defining the (partial) node order 182 + */ 183 + static __always_inline void 184 + rb_add_cached(struct rb_node *node, struct rb_root_cached *tree, 185 + bool (*less)(struct rb_node *, const struct rb_node *)) 186 + { 187 + struct rb_node **link = &tree->rb_root.rb_node; 188 + struct rb_node *parent = NULL; 189 + bool leftmost = true; 190 + 191 + while (*link) { 192 + parent = *link; 193 + if (less(node, parent)) { 194 + link = &parent->rb_left; 195 + } else { 196 + link = &parent->rb_right; 197 + leftmost = false; 198 + } 199 + } 200 + 201 + rb_link_node(node, parent, link); 202 + rb_insert_color_cached(node, tree, leftmost); 203 + } 204 + 205 + /** 206 + * rb_add() - insert @node into @tree 207 + * @node: node to insert 208 + * @tree: tree to insert @node into 209 + * @less: operator defining the (partial) node order 210 + */ 211 + static __always_inline void 212 + rb_add(struct rb_node *node, struct rb_root *tree, 213 + bool (*less)(struct rb_node *, const struct rb_node *)) 214 + { 215 + struct rb_node **link = &tree->rb_node; 216 + struct rb_node *parent = NULL; 
217 + 218 + while (*link) { 219 + parent = *link; 220 + if (less(node, parent)) 221 + link = &parent->rb_left; 222 + else 223 + link = &parent->rb_right; 224 + } 225 + 226 + rb_link_node(node, parent, link); 227 + rb_insert_color(node, tree); 228 + } 229 + 230 + /** 231 + * rb_find_add() - find equivalent @node in @tree, or add @node 232 + * @node: node to look-for / insert 233 + * @tree: tree to search / modify 234 + * @cmp: operator defining the node order 235 + * 236 + * Returns the rb_node matching @node, or NULL when no match is found and @node 237 + * is inserted. 238 + */ 239 + static __always_inline struct rb_node * 240 + rb_find_add(struct rb_node *node, struct rb_root *tree, 241 + int (*cmp)(struct rb_node *, const struct rb_node *)) 242 + { 243 + struct rb_node **link = &tree->rb_node; 244 + struct rb_node *parent = NULL; 245 + int c; 246 + 247 + while (*link) { 248 + parent = *link; 249 + c = cmp(node, parent); 250 + 251 + if (c < 0) 252 + link = &parent->rb_left; 253 + else if (c > 0) 254 + link = &parent->rb_right; 255 + else 256 + return parent; 257 + } 258 + 259 + rb_link_node(node, parent, link); 260 + rb_insert_color(node, tree); 261 + return NULL; 262 + } 263 + 264 + /** 265 + * rb_find() - find @key in tree @tree 266 + * @key: key to match 267 + * @tree: tree to search 268 + * @cmp: operator defining the node order 269 + * 270 + * Returns the rb_node matching @key or NULL. 
271 + */ 272 + static __always_inline struct rb_node * 273 + rb_find(const void *key, const struct rb_root *tree, 274 + int (*cmp)(const void *key, const struct rb_node *)) 275 + { 276 + struct rb_node *node = tree->rb_node; 277 + 278 + while (node) { 279 + int c = cmp(key, node); 280 + 281 + if (c < 0) 282 + node = node->rb_left; 283 + else if (c > 0) 284 + node = node->rb_right; 285 + else 286 + return node; 287 + } 288 + 289 + return NULL; 290 + } 291 + 292 + /** 293 + * rb_find_first() - find the first @key in @tree 294 + * @key: key to match 295 + * @tree: tree to search 296 + * @cmp: operator defining node order 297 + * 298 + * Returns the leftmost node matching @key, or NULL. 299 + */ 300 + static __always_inline struct rb_node * 301 + rb_find_first(const void *key, const struct rb_root *tree, 302 + int (*cmp)(const void *key, const struct rb_node *)) 303 + { 304 + struct rb_node *node = tree->rb_node; 305 + struct rb_node *match = NULL; 306 + 307 + while (node) { 308 + int c = cmp(key, node); 309 + 310 + if (c <= 0) { 311 + if (!c) 312 + match = node; 313 + node = node->rb_left; 314 + } else if (c > 0) { 315 + node = node->rb_right; 316 + } 317 + } 318 + 319 + return match; 320 + } 321 + 322 + /** 323 + * rb_next_match() - find the next @key in @tree 324 + * @key: key to match 325 + * @tree: tree to search 326 + * @cmp: operator defining node order 327 + * 328 + * Returns the next node matching @key, or NULL. 
329 + */ 330 + static __always_inline struct rb_node * 331 + rb_next_match(const void *key, struct rb_node *node, 332 + int (*cmp)(const void *key, const struct rb_node *)) 333 + { 334 + node = rb_next(node); 335 + if (node && cmp(key, node)) 336 + node = NULL; 337 + return node; 338 + } 339 + 340 + /** 341 + * rb_for_each() - iterates a subtree matching @key 342 + * @node: iterator 343 + * @key: key to match 344 + * @tree: tree to search 345 + * @cmp: operator defining node order 346 + */ 347 + #define rb_for_each(node, key, tree, cmp) \ 348 + for ((node) = rb_find_first((key), (tree), (cmp)); \ 349 + (node); (node) = rb_next_match((key), (node), (cmp))) 350 + 161 351 #endif /* _LINUX_RBTREE_H */
+191 -1
tools/include/linux/rbtree.h
··· 152 152 rb_replace_node(victim, new, &root->rb_root); 153 153 } 154 154 155 - #endif /* __TOOLS_LINUX_PERF_RBTREE_H */ 155 + /* 156 + * The below helper functions use 2 operators with 3 different 157 + * calling conventions. The operators are related like: 158 + * 159 + * comp(a->key,b) < 0 := less(a,b) 160 + * comp(a->key,b) > 0 := less(b,a) 161 + * comp(a->key,b) == 0 := !less(a,b) && !less(b,a) 162 + * 163 + * If these operators define a partial order on the elements we make no 164 + * guarantee on which of the elements matching the key is found. See 165 + * rb_find(). 166 + * 167 + * The reason for this is to allow the find() interface without requiring an 168 + * on-stack dummy object, which might not be feasible due to object size. 169 + */ 170 + 171 + /** 172 + * rb_add_cached() - insert @node into the leftmost cached tree @tree 173 + * @node: node to insert 174 + * @tree: leftmost cached tree to insert @node into 175 + * @less: operator defining the (partial) node order 176 + */ 177 + static __always_inline void 178 + rb_add_cached(struct rb_node *node, struct rb_root_cached *tree, 179 + bool (*less)(struct rb_node *, const struct rb_node *)) 180 + { 181 + struct rb_node **link = &tree->rb_root.rb_node; 182 + struct rb_node *parent = NULL; 183 + bool leftmost = true; 184 + 185 + while (*link) { 186 + parent = *link; 187 + if (less(node, parent)) { 188 + link = &parent->rb_left; 189 + } else { 190 + link = &parent->rb_right; 191 + leftmost = false; 192 + } 193 + } 194 + 195 + rb_link_node(node, parent, link); 196 + rb_insert_color_cached(node, tree, leftmost); 197 + } 198 + 199 + /** 200 + * rb_add() - insert @node into @tree 201 + * @node: node to insert 202 + * @tree: tree to insert @node into 203 + * @less: operator defining the (partial) node order 204 + */ 205 + static __always_inline void 206 + rb_add(struct rb_node *node, struct rb_root *tree, 207 + bool (*less)(struct rb_node *, const struct rb_node *)) 208 + { 209 + struct rb_node **link = 
&tree->rb_node; 210 + struct rb_node *parent = NULL; 211 + 212 + while (*link) { 213 + parent = *link; 214 + if (less(node, parent)) 215 + link = &parent->rb_left; 216 + else 217 + link = &parent->rb_right; 218 + } 219 + 220 + rb_link_node(node, parent, link); 221 + rb_insert_color(node, tree); 222 + } 223 + 224 + /** 225 + * rb_find_add() - find equivalent @node in @tree, or add @node 226 + * @node: node to look-for / insert 227 + * @tree: tree to search / modify 228 + * @cmp: operator defining the node order 229 + * 230 + * Returns the rb_node matching @node, or NULL when no match is found and @node 231 + * is inserted. 232 + */ 233 + static __always_inline struct rb_node * 234 + rb_find_add(struct rb_node *node, struct rb_root *tree, 235 + int (*cmp)(struct rb_node *, const struct rb_node *)) 236 + { 237 + struct rb_node **link = &tree->rb_node; 238 + struct rb_node *parent = NULL; 239 + int c; 240 + 241 + while (*link) { 242 + parent = *link; 243 + c = cmp(node, parent); 244 + 245 + if (c < 0) 246 + link = &parent->rb_left; 247 + else if (c > 0) 248 + link = &parent->rb_right; 249 + else 250 + return parent; 251 + } 252 + 253 + rb_link_node(node, parent, link); 254 + rb_insert_color(node, tree); 255 + return NULL; 256 + } 257 + 258 + /** 259 + * rb_find() - find @key in tree @tree 260 + * @key: key to match 261 + * @tree: tree to search 262 + * @cmp: operator defining the node order 263 + * 264 + * Returns the rb_node matching @key or NULL. 
265 + */ 266 + static __always_inline struct rb_node * 267 + rb_find(const void *key, const struct rb_root *tree, 268 + int (*cmp)(const void *key, const struct rb_node *)) 269 + { 270 + struct rb_node *node = tree->rb_node; 271 + 272 + while (node) { 273 + int c = cmp(key, node); 274 + 275 + if (c < 0) 276 + node = node->rb_left; 277 + else if (c > 0) 278 + node = node->rb_right; 279 + else 280 + return node; 281 + } 282 + 283 + return NULL; 284 + } 285 + 286 + /** 287 + * rb_find_first() - find the first @key in @tree 288 + * @key: key to match 289 + * @tree: tree to search 290 + * @cmp: operator defining node order 291 + * 292 + * Returns the leftmost node matching @key, or NULL. 293 + */ 294 + static __always_inline struct rb_node * 295 + rb_find_first(const void *key, const struct rb_root *tree, 296 + int (*cmp)(const void *key, const struct rb_node *)) 297 + { 298 + struct rb_node *node = tree->rb_node; 299 + struct rb_node *match = NULL; 300 + 301 + while (node) { 302 + int c = cmp(key, node); 303 + 304 + if (c <= 0) { 305 + if (!c) 306 + match = node; 307 + node = node->rb_left; 308 + } else if (c > 0) { 309 + node = node->rb_right; 310 + } 311 + } 312 + 313 + return match; 314 + } 315 + 316 + /** 317 + * rb_next_match() - find the next @key in @tree 318 + * @key: key to match 319 + * @tree: tree to search 320 + * @cmp: operator defining node order 321 + * 322 + * Returns the next node matching @key, or NULL. 
323 + */ 324 + static __always_inline struct rb_node * 325 + rb_next_match(const void *key, struct rb_node *node, 326 + int (*cmp)(const void *key, const struct rb_node *)) 327 + { 328 + node = rb_next(node); 329 + if (node && cmp(key, node)) 330 + node = NULL; 331 + return node; 332 + } 333 + 334 + /** 335 + * rb_for_each() - iterates a subtree matching @key 336 + * @node: iterator 337 + * @key: key to match 338 + * @tree: tree to search 339 + * @cmp: operator defining node order 340 + */ 341 + #define rb_for_each(node, key, tree, cmp) \ 342 + for ((node) = rb_find_first((key), (tree), (cmp)); \ 343 + (node); (node) = rb_next_match((key), (node), (cmp))) 344 + 345 + #endif /* __TOOLS_LINUX_PERF_RBTREE_H */
+11 -62
tools/objtool/elf.c
··· 43 43 #define elf_hash_for_each_possible(name, obj, member, key) \ 44 44 hlist_for_each_entry(obj, &name[hash_min(key, elf_hash_bits())], member) 45 45 46 - static void rb_add(struct rb_root *tree, struct rb_node *node, 47 - int (*cmp)(struct rb_node *, const struct rb_node *)) 48 - { 49 - struct rb_node **link = &tree->rb_node; 50 - struct rb_node *parent = NULL; 51 - 52 - while (*link) { 53 - parent = *link; 54 - if (cmp(node, parent) < 0) 55 - link = &parent->rb_left; 56 - else 57 - link = &parent->rb_right; 58 - } 59 - 60 - rb_link_node(node, parent, link); 61 - rb_insert_color(node, tree); 62 - } 63 - 64 - static struct rb_node *rb_find_first(const struct rb_root *tree, const void *key, 65 - int (*cmp)(const void *key, const struct rb_node *)) 66 - { 67 - struct rb_node *node = tree->rb_node; 68 - struct rb_node *match = NULL; 69 - 70 - while (node) { 71 - int c = cmp(key, node); 72 - if (c <= 0) { 73 - if (!c) 74 - match = node; 75 - node = node->rb_left; 76 - } else if (c > 0) { 77 - node = node->rb_right; 78 - } 79 - } 80 - 81 - return match; 82 - } 83 - 84 - static struct rb_node *rb_next_match(struct rb_node *node, const void *key, 85 - int (*cmp)(const void *key, const struct rb_node *)) 86 - { 87 - node = rb_next(node); 88 - if (node && cmp(key, node)) 89 - node = NULL; 90 - return node; 91 - } 92 - 93 - #define rb_for_each(tree, node, key, cmp) \ 94 - for ((node) = rb_find_first((tree), (key), (cmp)); \ 95 - (node); (node) = rb_next_match((node), (key), (cmp))) 96 - 97 - static int symbol_to_offset(struct rb_node *a, const struct rb_node *b) 46 + static bool symbol_to_offset(struct rb_node *a, const struct rb_node *b) 98 47 { 99 48 struct symbol *sa = rb_entry(a, struct symbol, node); 100 49 struct symbol *sb = rb_entry(b, struct symbol, node); 101 50 102 51 if (sa->offset < sb->offset) 103 - return -1; 52 + return true; 104 53 if (sa->offset > sb->offset) 105 - return 1; 54 + return false; 106 55 107 56 if (sa->len < sb->len) 108 - return -1; 57 + 
return true; 109 58 if (sa->len > sb->len) 110 - return 1; 59 + return false; 111 60 112 61 sa->alias = sb; 113 62 114 - return 0; 63 + return false; 115 64 } 116 65 117 66 static int symbol_by_offset(const void *key, const struct rb_node *node) ··· 114 165 { 115 166 struct rb_node *node; 116 167 117 - rb_for_each(&sec->symbol_tree, node, &offset, symbol_by_offset) { 168 + rb_for_each(node, &offset, &sec->symbol_tree, symbol_by_offset) { 118 169 struct symbol *s = rb_entry(node, struct symbol, node); 119 170 120 171 if (s->offset == offset && s->type != STT_SECTION) ··· 128 179 { 129 180 struct rb_node *node; 130 181 131 - rb_for_each(&sec->symbol_tree, node, &offset, symbol_by_offset) { 182 + rb_for_each(node, &offset, &sec->symbol_tree, symbol_by_offset) { 132 183 struct symbol *s = rb_entry(node, struct symbol, node); 133 184 134 185 if (s->offset == offset && s->type == STT_FUNC) ··· 142 193 { 143 194 struct rb_node *node; 144 195 145 - rb_for_each(&sec->symbol_tree, node, &offset, symbol_by_offset) { 196 + rb_for_each(node, &offset, &sec->symbol_tree, symbol_by_offset) { 146 197 struct symbol *s = rb_entry(node, struct symbol, node); 147 198 148 199 if (s->type != STT_SECTION) ··· 156 207 { 157 208 struct rb_node *node; 158 209 159 - rb_for_each(&sec->symbol_tree, node, &offset, symbol_by_offset) { 210 + rb_for_each(node, &offset, &sec->symbol_tree, symbol_by_offset) { 160 211 struct symbol *s = rb_entry(node, struct symbol, node); 161 212 162 213 if (s->type == STT_FUNC) ··· 391 442 sym->offset = sym->sym.st_value; 392 443 sym->len = sym->sym.st_size; 393 444 394 - rb_add(&sym->sec->symbol_tree, &sym->node, symbol_to_offset); 445 + rb_add(&sym->node, &sym->sec->symbol_tree, symbol_to_offset); 395 446 pnode = rb_prev(&sym->node); 396 447 if (pnode) 397 448 entry = &rb_entry(pnode, struct symbol, node)->list;