Linux kernel mirror (for testing) — git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

radix tree: Remove multiorder support

All users have now been converted to the XArray. Removing the support
reduces code size and ensures new users will use the XArray instead.

Signed-off-by: Matthew Wilcox <willy@infradead.org>

+19 -245
+4 -36
include/linux/radix-tree.h
··· 96 96 * @next_index: one beyond the last index for this chunk 97 97 * @tags: bit-mask for tag-iterating 98 98 * @node: node that contains current slot 99 - * @shift: shift for the node that holds our slots 100 99 * 101 100 * This radix tree iterator works in terms of "chunks" of slots. A chunk is a 102 101 * subinterval of slots contained within one radix tree leaf node. It is ··· 109 110 unsigned long next_index; 110 111 unsigned long tags; 111 112 struct radix_tree_node *node; 112 - #ifdef CONFIG_RADIX_TREE_MULTIORDER 113 - unsigned int shift; 114 - #endif 115 113 }; 116 - 117 - static inline unsigned int iter_shift(const struct radix_tree_iter *iter) 118 - { 119 - #ifdef CONFIG_RADIX_TREE_MULTIORDER 120 - return iter->shift; 121 - #else 122 - return 0; 123 - #endif 124 - } 125 114 126 115 /** 127 116 * Radix-tree synchronization ··· 217 230 return unlikely((unsigned long)arg & RADIX_TREE_ENTRY_MASK); 218 231 } 219 232 220 - int __radix_tree_insert(struct radix_tree_root *, unsigned long index, 221 - unsigned order, void *); 222 - static inline int radix_tree_insert(struct radix_tree_root *root, 223 - unsigned long index, void *entry) 224 - { 225 - return __radix_tree_insert(root, index, 0, entry); 226 - } 233 + int radix_tree_insert(struct radix_tree_root *, unsigned long index, 234 + void *); 227 235 void *__radix_tree_lookup(const struct radix_tree_root *, unsigned long index, 228 236 struct radix_tree_node **nodep, void __rcu ***slotp); 229 237 void *radix_tree_lookup(const struct radix_tree_root *, unsigned long); ··· 366 384 static inline unsigned long 367 385 __radix_tree_iter_add(struct radix_tree_iter *iter, unsigned long slots) 368 386 { 369 - return iter->index + (slots << iter_shift(iter)); 387 + return iter->index + slots; 370 388 } 371 389 372 390 /** ··· 391 409 static __always_inline long 392 410 radix_tree_chunk_size(struct radix_tree_iter *iter) 393 411 { 394 - return (iter->next_index - iter->index) >> iter_shift(iter); 412 + return 
iter->next_index - iter->index; 395 413 } 396 - 397 - #ifdef CONFIG_RADIX_TREE_MULTIORDER 398 - void __rcu **__radix_tree_next_slot(void __rcu **slot, 399 - struct radix_tree_iter *iter, unsigned flags); 400 - #else 401 - /* Can't happen without sibling entries, but the compiler can't tell that */ 402 - static inline void __rcu **__radix_tree_next_slot(void __rcu **slot, 403 - struct radix_tree_iter *iter, unsigned flags) 404 - { 405 - return slot; 406 - } 407 - #endif 408 414 409 415 /** 410 416 * radix_tree_next_slot - find next slot in chunk ··· 452 482 return NULL; 453 483 454 484 found: 455 - if (unlikely(radix_tree_is_internal_node(rcu_dereference_raw(*slot)))) 456 - return __radix_tree_next_slot(slot, iter, flags); 457 485 return slot; 458 486 } 459 487
-4
lib/Kconfig
··· 405 405 Support entries which occupy multiple consecutive indices in the 406 406 XArray. 407 407 408 - config RADIX_TREE_MULTIORDER 409 - bool 410 - select XARRAY_MULTI 411 - 412 408 config ASSOCIATIVE_ARRAY 413 409 bool 414 410 help
+13 -202
lib/radix-tree.c
··· 110 110 unsigned int offset = (index >> parent->shift) & RADIX_TREE_MAP_MASK; 111 111 void __rcu **entry = rcu_dereference_raw(parent->slots[offset]); 112 112 113 - if (xa_is_sibling(entry)) { 114 - offset = xa_to_sibling(entry); 115 - entry = rcu_dereference_raw(parent->slots[offset]); 116 - } 117 - 118 113 *nodep = (void *)entry; 119 114 return offset; 120 115 } ··· 224 229 225 230 static unsigned int iter_offset(const struct radix_tree_iter *iter) 226 231 { 227 - return (iter->index >> iter_shift(iter)) & RADIX_TREE_MAP_MASK; 232 + return iter->index & RADIX_TREE_MAP_MASK; 228 233 } 229 234 230 235 /* ··· 501 506 502 507 /* 503 508 * The candidate node has more than one child, or its child 504 - * is not at the leftmost slot, or the child is a multiorder 505 - * entry, we cannot shrink. 509 + * is not at the leftmost slot, we cannot shrink. 506 510 */ 507 511 if (node->count != 1) 508 512 break; 509 513 child = rcu_dereference_raw(node->slots[0]); 510 514 if (!child) 511 - break; 512 - if (!radix_tree_is_internal_node(child) && node->shift) 513 515 break; 514 516 515 517 /* ··· 605 613 * __radix_tree_create - create a slot in a radix tree 606 614 * @root: radix tree root 607 615 * @index: index key 608 - * @order: index occupies 2^order aligned slots 609 616 * @nodep: returns node 610 617 * @slotp: returns slot 611 618 * ··· 618 627 * Returns -ENOMEM, or 0 for success. 
619 628 */ 620 629 static int __radix_tree_create(struct radix_tree_root *root, 621 - unsigned long index, unsigned order, 622 - struct radix_tree_node **nodep, void __rcu ***slotp) 630 + unsigned long index, struct radix_tree_node **nodep, 631 + void __rcu ***slotp) 623 632 { 624 633 struct radix_tree_node *node = NULL, *child; 625 634 void __rcu **slot = (void __rcu **)&root->xa_head; 626 635 unsigned long maxindex; 627 636 unsigned int shift, offset = 0; 628 - unsigned long max = index | ((1UL << order) - 1); 637 + unsigned long max = index; 629 638 gfp_t gfp = root_gfp_mask(root); 630 639 631 640 shift = radix_tree_load_root(root, &child, &maxindex); 632 641 633 642 /* Make sure the tree is high enough. */ 634 - if (order > 0 && max == ((1UL << order) - 1)) 635 - max++; 636 643 if (max > maxindex) { 637 644 int error = radix_tree_extend(root, gfp, max, shift); 638 645 if (error < 0) ··· 639 650 child = rcu_dereference_raw(root->xa_head); 640 651 } 641 652 642 - while (shift > order) { 653 + while (shift > 0) { 643 654 shift -= RADIX_TREE_MAP_SHIFT; 644 655 if (child == NULL) { 645 656 /* Have to add a child node. 
*/ ··· 700 711 } 701 712 } 702 713 703 - #ifdef CONFIG_RADIX_TREE_MULTIORDER 704 714 static inline int insert_entries(struct radix_tree_node *node, 705 - void __rcu **slot, void *item, unsigned order, bool replace) 706 - { 707 - void *sibling; 708 - unsigned i, n, tag, offset, tags = 0; 709 - 710 - if (node) { 711 - if (order > node->shift) 712 - n = 1 << (order - node->shift); 713 - else 714 - n = 1; 715 - offset = get_slot_offset(node, slot); 716 - } else { 717 - n = 1; 718 - offset = 0; 719 - } 720 - 721 - if (n > 1) { 722 - offset = offset & ~(n - 1); 723 - slot = &node->slots[offset]; 724 - } 725 - sibling = xa_mk_sibling(offset); 726 - 727 - for (i = 0; i < n; i++) { 728 - if (slot[i]) { 729 - if (replace) { 730 - node->count--; 731 - for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) 732 - if (tag_get(node, tag, offset + i)) 733 - tags |= 1 << tag; 734 - } else 735 - return -EEXIST; 736 - } 737 - } 738 - 739 - for (i = 0; i < n; i++) { 740 - struct radix_tree_node *old = rcu_dereference_raw(slot[i]); 741 - if (i) { 742 - rcu_assign_pointer(slot[i], sibling); 743 - for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) 744 - if (tags & (1 << tag)) 745 - tag_clear(node, tag, offset + i); 746 - } else { 747 - rcu_assign_pointer(slot[i], item); 748 - for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) 749 - if (tags & (1 << tag)) 750 - tag_set(node, tag, offset); 751 - } 752 - if (xa_is_node(old)) 753 - radix_tree_free_nodes(old); 754 - if (xa_is_value(old)) 755 - node->nr_values--; 756 - } 757 - if (node) { 758 - node->count += n; 759 - if (xa_is_value(item)) 760 - node->nr_values += n; 761 - } 762 - return n; 763 - } 764 - #else 765 - static inline int insert_entries(struct radix_tree_node *node, 766 - void __rcu **slot, void *item, unsigned order, bool replace) 715 + void __rcu **slot, void *item, bool replace) 767 716 { 768 717 if (*slot) 769 718 return -EEXIST; ··· 713 786 } 714 787 return 1; 715 788 } 716 - #endif 717 789 718 790 /** 719 791 * __radix_tree_insert - 
insert into a radix tree 720 792 * @root: radix tree root 721 793 * @index: index key 722 - * @order: key covers the 2^order indices around index 723 794 * @item: item to insert 724 795 * 725 796 * Insert an item into the radix tree at position @index. 726 797 */ 727 - int __radix_tree_insert(struct radix_tree_root *root, unsigned long index, 728 - unsigned order, void *item) 798 + int radix_tree_insert(struct radix_tree_root *root, unsigned long index, 799 + void *item) 729 800 { 730 801 struct radix_tree_node *node; 731 802 void __rcu **slot; ··· 731 806 732 807 BUG_ON(radix_tree_is_internal_node(item)); 733 808 734 - error = __radix_tree_create(root, index, order, &node, &slot); 809 + error = __radix_tree_create(root, index, &node, &slot); 735 810 if (error) 736 811 return error; 737 812 738 - error = insert_entries(node, slot, item, order, false); 813 + error = insert_entries(node, slot, item, false); 739 814 if (error < 0) 740 815 return error; 741 816 ··· 750 825 751 826 return 0; 752 827 } 753 - EXPORT_SYMBOL(__radix_tree_insert); 828 + EXPORT_SYMBOL(radix_tree_insert); 754 829 755 830 /** 756 831 * __radix_tree_lookup - lookup an item in a radix tree ··· 842 917 } 843 918 EXPORT_SYMBOL(radix_tree_lookup); 844 919 845 - static inline void replace_sibling_entries(struct radix_tree_node *node, 846 - void __rcu **slot, int count, int values) 847 - { 848 - #ifdef CONFIG_RADIX_TREE_MULTIORDER 849 - unsigned offset = get_slot_offset(node, slot); 850 - void *ptr = xa_mk_sibling(offset); 851 - 852 - while (++offset < RADIX_TREE_MAP_SIZE) { 853 - if (rcu_dereference_raw(node->slots[offset]) != ptr) 854 - break; 855 - if (count < 0) { 856 - node->slots[offset] = NULL; 857 - node->count--; 858 - } 859 - node->nr_values += values; 860 - } 861 - #endif 862 - } 863 - 864 920 static void replace_slot(void __rcu **slot, void *item, 865 921 struct radix_tree_node *node, int count, int values) 866 922 { 867 923 if (node && (count || values)) { 868 924 node->count += count; 
869 925 node->nr_values += values; 870 - replace_sibling_entries(node, slot, count, values); 871 926 } 872 927 873 928 rcu_assign_pointer(*slot, item); ··· 1128 1223 } 1129 1224 EXPORT_SYMBOL(radix_tree_tag_get); 1130 1225 1131 - static inline void __set_iter_shift(struct radix_tree_iter *iter, 1132 - unsigned int shift) 1133 - { 1134 - #ifdef CONFIG_RADIX_TREE_MULTIORDER 1135 - iter->shift = shift; 1136 - #endif 1137 - } 1138 - 1139 1226 /* Construct iter->tags bit-mask from node->tags[tag] array */ 1140 1227 static void set_iter_tags(struct radix_tree_iter *iter, 1141 1228 struct radix_tree_node *node, unsigned offset, ··· 1154 1257 } 1155 1258 } 1156 1259 1157 - #ifdef CONFIG_RADIX_TREE_MULTIORDER 1158 - static void __rcu **skip_siblings(struct radix_tree_node **nodep, 1159 - void __rcu **slot, struct radix_tree_iter *iter) 1160 - { 1161 - while (iter->index < iter->next_index) { 1162 - *nodep = rcu_dereference_raw(*slot); 1163 - if (*nodep && !xa_is_sibling(*nodep)) 1164 - return slot; 1165 - slot++; 1166 - iter->index = __radix_tree_iter_add(iter, 1); 1167 - iter->tags >>= 1; 1168 - } 1169 - 1170 - *nodep = NULL; 1171 - return NULL; 1172 - } 1173 - 1174 - void __rcu **__radix_tree_next_slot(void __rcu **slot, 1175 - struct radix_tree_iter *iter, unsigned flags) 1176 - { 1177 - unsigned tag = flags & RADIX_TREE_ITER_TAG_MASK; 1178 - struct radix_tree_node *node; 1179 - 1180 - slot = skip_siblings(&node, slot, iter); 1181 - 1182 - while (radix_tree_is_internal_node(node)) { 1183 - unsigned offset; 1184 - unsigned long next_index; 1185 - 1186 - if (node == RADIX_TREE_RETRY) 1187 - return slot; 1188 - node = entry_to_node(node); 1189 - iter->node = node; 1190 - iter->shift = node->shift; 1191 - 1192 - if (flags & RADIX_TREE_ITER_TAGGED) { 1193 - offset = radix_tree_find_next_bit(node, tag, 0); 1194 - if (offset == RADIX_TREE_MAP_SIZE) 1195 - return NULL; 1196 - slot = &node->slots[offset]; 1197 - iter->index = __radix_tree_iter_add(iter, offset); 1198 - 
set_iter_tags(iter, node, offset, tag); 1199 - node = rcu_dereference_raw(*slot); 1200 - } else { 1201 - offset = 0; 1202 - slot = &node->slots[0]; 1203 - for (;;) { 1204 - node = rcu_dereference_raw(*slot); 1205 - if (node) 1206 - break; 1207 - slot++; 1208 - offset++; 1209 - if (offset == RADIX_TREE_MAP_SIZE) 1210 - return NULL; 1211 - } 1212 - iter->index = __radix_tree_iter_add(iter, offset); 1213 - } 1214 - if ((flags & RADIX_TREE_ITER_CONTIG) && (offset > 0)) 1215 - goto none; 1216 - next_index = (iter->index | shift_maxindex(iter->shift)) + 1; 1217 - if (next_index < iter->next_index) 1218 - iter->next_index = next_index; 1219 - } 1220 - 1221 - return slot; 1222 - none: 1223 - iter->next_index = 0; 1224 - return NULL; 1225 - } 1226 - EXPORT_SYMBOL(__radix_tree_next_slot); 1227 - #else 1228 - static void __rcu **skip_siblings(struct radix_tree_node **nodep, 1229 - void __rcu **slot, struct radix_tree_iter *iter) 1230 - { 1231 - return slot; 1232 - } 1233 - #endif 1234 - 1235 1260 void __rcu **radix_tree_iter_resume(void __rcu **slot, 1236 1261 struct radix_tree_iter *iter) 1237 1262 { 1238 - struct radix_tree_node *node; 1239 - 1240 1263 slot++; 1241 1264 iter->index = __radix_tree_iter_add(iter, 1); 1242 - skip_siblings(&node, slot, iter); 1243 1265 iter->next_index = iter->index; 1244 1266 iter->tags = 0; 1245 1267 return NULL; ··· 1209 1393 iter->next_index = maxindex + 1; 1210 1394 iter->tags = 1; 1211 1395 iter->node = NULL; 1212 - __set_iter_shift(iter, 0); 1213 1396 return (void __rcu **)&root->xa_head; 1214 1397 } 1215 1398 ··· 1229 1414 while (++offset < RADIX_TREE_MAP_SIZE) { 1230 1415 void *slot = rcu_dereference_raw( 1231 1416 node->slots[offset]); 1232 - if (xa_is_sibling(slot)) 1233 - continue; 1234 1417 if (slot) 1235 1418 break; 1236 1419 } ··· 1249 1436 } while (node->shift && radix_tree_is_internal_node(child)); 1250 1437 1251 1438 /* Update the iterator state */ 1252 - iter->index = (index &~ node_maxindex(node)) | (offset << node->shift); 
1439 + iter->index = (index &~ node_maxindex(node)) | offset; 1253 1440 iter->next_index = (index | node_maxindex(node)) + 1; 1254 1441 iter->node = node; 1255 - __set_iter_shift(iter, node->shift); 1256 1442 1257 1443 if (flags & RADIX_TREE_ITER_TAGGED) 1258 1444 set_iter_tags(iter, node, offset, tag); ··· 1562 1750 else 1563 1751 iter->next_index = 1; 1564 1752 iter->node = node; 1565 - __set_iter_shift(iter, shift); 1566 1753 set_iter_tags(iter, node, offset, IDR_FREE); 1567 1754 1568 1755 return slot;
+2 -2
mm/Kconfig
··· 379 379 bool "Transparent Hugepage Support" 380 380 depends on HAVE_ARCH_TRANSPARENT_HUGEPAGE 381 381 select COMPACTION 382 - select RADIX_TREE_MULTIORDER 382 + select XARRAY_MULTI 383 383 help 384 384 Transparent Hugepages allows the kernel to use huge pages and 385 385 huge tlb transparently to the applications whenever possible. ··· 671 671 depends on MEMORY_HOTREMOVE 672 672 depends on SPARSEMEM_VMEMMAP 673 673 depends on ARCH_HAS_ZONE_DEVICE 674 - select RADIX_TREE_MULTIORDER 674 + select XARRAY_MULTI 675 675 676 676 help 677 677 Device memory hotplug support allows for establishing pmem,
-1
tools/testing/radix-tree/generated/autoconf.h
··· 1 - #define CONFIG_RADIX_TREE_MULTIORDER 1 2 1 #define CONFIG_XARRAY_MULTI 1