Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

radix tree test: Convert multiorder tests to XArray

This is the last remaining user of the multiorder functionality of the
radix tree. Test the XArray instead.

Signed-off-by: Matthew Wilcox <willy@infradead.org>

+50 -57
tools/testing/radix-tree/multiorder.c
··· 39 39 return xas_error(&xas); 40 40 } 41 41 42 - void multiorder_iteration(void) 42 + void multiorder_iteration(struct xarray *xa) 43 43 { 44 - RADIX_TREE(tree, GFP_KERNEL); 45 - struct radix_tree_iter iter; 46 - void **slot; 44 + XA_STATE(xas, xa, 0); 45 + struct item *item; 47 46 int i, j, err; 48 - 49 - printv(1, "Multiorder iteration test\n"); 50 47 51 48 #define NUM_ENTRIES 11 52 49 int index[NUM_ENTRIES] = {0, 2, 4, 8, 16, 32, 34, 36, 64, 72, 128}; 53 50 int order[NUM_ENTRIES] = {1, 1, 2, 3, 4, 1, 0, 1, 3, 0, 7}; 54 51 52 + printv(1, "Multiorder iteration test\n"); 53 + 55 54 for (i = 0; i < NUM_ENTRIES; i++) { 56 - err = item_insert_order(&tree, index[i], order[i]); 55 + err = item_insert_order(xa, index[i], order[i]); 57 56 assert(!err); 58 57 } 59 58 ··· 61 62 if (j <= (index[i] | ((1 << order[i]) - 1))) 62 63 break; 63 64 64 - radix_tree_for_each_slot(slot, &tree, &iter, j) { 65 - int height = order[i] / RADIX_TREE_MAP_SHIFT; 66 - int shift = height * RADIX_TREE_MAP_SHIFT; 65 + xas_set(&xas, j); 66 + xas_for_each(&xas, item, ULONG_MAX) { 67 + int height = order[i] / XA_CHUNK_SHIFT; 68 + int shift = height * XA_CHUNK_SHIFT; 67 69 unsigned long mask = (1UL << order[i]) - 1; 68 - struct item *item = *slot; 69 70 70 - assert((iter.index | mask) == (index[i] | mask)); 71 - assert(iter.shift == shift); 71 + assert((xas.xa_index | mask) == (index[i] | mask)); 72 + assert(xas.xa_node->shift == shift); 72 73 assert(!radix_tree_is_internal_node(item)); 73 74 assert((item->index | mask) == (index[i] | mask)); 74 75 assert(item->order == order[i]); ··· 76 77 } 77 78 } 78 79 79 - item_kill_tree(&tree); 80 + item_kill_tree(xa); 80 81 } 81 82 82 - void multiorder_tagged_iteration(void) 83 + void multiorder_tagged_iteration(struct xarray *xa) 83 84 { 84 - RADIX_TREE(tree, GFP_KERNEL); 85 - struct radix_tree_iter iter; 86 - void **slot; 85 + XA_STATE(xas, xa, 0); 86 + struct item *item; 87 87 int i, j; 88 - 89 - printv(1, "Multiorder tagged iteration test\n"); 90 88 
91 89 #define MT_NUM_ENTRIES 9 92 90 int index[MT_NUM_ENTRIES] = {0, 2, 4, 16, 32, 40, 64, 72, 128}; ··· 92 96 #define TAG_ENTRIES 7 93 97 int tag_index[TAG_ENTRIES] = {0, 4, 16, 40, 64, 72, 128}; 94 98 95 - for (i = 0; i < MT_NUM_ENTRIES; i++) 96 - assert(!item_insert_order(&tree, index[i], order[i])); 99 + printv(1, "Multiorder tagged iteration test\n"); 97 100 98 - assert(!radix_tree_tagged(&tree, 1)); 101 + for (i = 0; i < MT_NUM_ENTRIES; i++) 102 + assert(!item_insert_order(xa, index[i], order[i])); 103 + 104 + assert(!xa_marked(xa, XA_MARK_1)); 99 105 100 106 for (i = 0; i < TAG_ENTRIES; i++) 101 - assert(radix_tree_tag_set(&tree, tag_index[i], 1)); 107 + xa_set_mark(xa, tag_index[i], XA_MARK_1); 102 108 103 109 for (j = 0; j < 256; j++) { 104 110 int k; ··· 112 114 break; 113 115 } 114 116 115 - radix_tree_for_each_tagged(slot, &tree, &iter, j, 1) { 117 + xas_set(&xas, j); 118 + xas_for_each_marked(&xas, item, ULONG_MAX, XA_MARK_1) { 116 119 unsigned long mask; 117 - struct item *item = *slot; 118 120 for (k = i; index[k] < tag_index[i]; k++) 119 121 ; 120 122 mask = (1UL << order[k]) - 1; 121 123 122 - assert((iter.index | mask) == (tag_index[i] | mask)); 123 - assert(!radix_tree_is_internal_node(item)); 124 + assert((xas.xa_index | mask) == (tag_index[i] | mask)); 125 + assert(!xa_is_internal(item)); 124 126 assert((item->index | mask) == (tag_index[i] | mask)); 125 127 assert(item->order == order[k]); 126 128 i++; 127 129 } 128 130 } 129 131 130 - assert(tag_tagged_items(&tree, 0, ~0UL, TAG_ENTRIES, XA_MARK_1, 132 + assert(tag_tagged_items(xa, 0, ULONG_MAX, TAG_ENTRIES, XA_MARK_1, 131 133 XA_MARK_2) == TAG_ENTRIES); 132 134 133 135 for (j = 0; j < 256; j++) { ··· 140 142 break; 141 143 } 142 144 143 - radix_tree_for_each_tagged(slot, &tree, &iter, j, 2) { 144 - struct item *item = *slot; 145 + xas_set(&xas, j); 146 + xas_for_each_marked(&xas, item, ULONG_MAX, XA_MARK_2) { 145 147 for (k = i; index[k] < tag_index[i]; k++) 146 148 ; 147 149 mask = (1 << 
order[k]) - 1; 148 150 149 - assert((iter.index | mask) == (tag_index[i] | mask)); 150 - assert(!radix_tree_is_internal_node(item)); 151 + assert((xas.xa_index | mask) == (tag_index[i] | mask)); 152 + assert(!xa_is_internal(item)); 151 153 assert((item->index | mask) == (tag_index[i] | mask)); 152 154 assert(item->order == order[k]); 153 155 i++; 154 156 } 155 157 } 156 158 157 - assert(tag_tagged_items(&tree, 1, ~0UL, MT_NUM_ENTRIES * 2, XA_MARK_1, 159 + assert(tag_tagged_items(xa, 1, ULONG_MAX, MT_NUM_ENTRIES * 2, XA_MARK_1, 158 160 XA_MARK_0) == TAG_ENTRIES); 159 161 i = 0; 160 - radix_tree_for_each_tagged(slot, &tree, &iter, 0, 0) { 161 - assert(iter.index == tag_index[i]); 162 + xas_set(&xas, 0); 163 + xas_for_each_marked(&xas, item, ULONG_MAX, XA_MARK_0) { 164 + assert(xas.xa_index == tag_index[i]); 162 165 i++; 163 166 } 167 + assert(i == TAG_ENTRIES); 164 168 165 - item_kill_tree(&tree); 169 + item_kill_tree(xa); 166 170 } 167 171 168 172 bool stop_iteration = false; ··· 187 187 188 188 static void *iterator_func(void *ptr) 189 189 { 190 - struct radix_tree_root *tree = ptr; 191 - struct radix_tree_iter iter; 190 + XA_STATE(xas, ptr, 0); 192 191 struct item *item; 193 - void **slot; 194 192 195 193 while (!stop_iteration) { 196 194 rcu_read_lock(); 197 - radix_tree_for_each_slot(slot, tree, &iter, 0) { 198 - item = radix_tree_deref_slot(slot); 199 - 200 - if (!item) 195 + xas_for_each(&xas, item, ULONG_MAX) { 196 + if (xas_retry(&xas, item)) 201 197 continue; 202 - if (radix_tree_deref_retry(item)) { 203 - slot = radix_tree_iter_retry(&iter); 204 - continue; 205 - } 206 198 207 - item_sanity(item, iter.index); 199 + item_sanity(item, xas.xa_index); 208 200 } 209 201 rcu_read_unlock(); 210 202 } 211 203 return NULL; 212 204 } 213 205 214 - static void multiorder_iteration_race(void) 206 + static void multiorder_iteration_race(struct xarray *xa) 215 207 { 216 208 const int num_threads = sysconf(_SC_NPROCESSORS_ONLN); 217 209 pthread_t 
worker_thread[num_threads]; 218 - RADIX_TREE(tree, GFP_KERNEL); 219 210 int i; 220 211 221 - pthread_create(&worker_thread[0], NULL, &creator_func, &tree); 212 + pthread_create(&worker_thread[0], NULL, &creator_func, xa); 222 213 for (i = 1; i < num_threads; i++) 223 - pthread_create(&worker_thread[i], NULL, &iterator_func, &tree); 214 + pthread_create(&worker_thread[i], NULL, &iterator_func, xa); 224 215 225 216 for (i = 0; i < num_threads; i++) 226 217 pthread_join(worker_thread[i], NULL); 227 218 228 - item_kill_tree(&tree); 219 + item_kill_tree(xa); 229 220 } 221 + 222 + static DEFINE_XARRAY(array); 230 223 231 224 void multiorder_checks(void) 232 225 { 233 - multiorder_iteration(); 234 - multiorder_tagged_iteration(); 235 - multiorder_iteration_race(); 226 + multiorder_iteration(&array); 227 + multiorder_tagged_iteration(&array); 228 + multiorder_iteration_race(&array); 236 229 237 230 radix_tree_cpu_dead(0); 238 231 }