Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

bcachefs: Make eytzinger size parameter more conventional

Signed-off-by: Kent Overstreet <kent.overstreet@gmail.com>

Authored and committed by Kent Overstreet
72492d55 17563164

+29 -33
+7 -7
fs/bcachefs/bset.c
··· 461 461 unsigned j) 462 462 { 463 463 return cacheline_to_bkey(b, t, 464 - __eytzinger1_to_inorder(j, t->size, t->extra), 464 + __eytzinger1_to_inorder(j, t->size - 1, t->extra), 465 465 bkey_float(b, t, j)->key_offset); 466 466 } 467 467 ··· 723 723 t->extra = (t->size - rounddown_pow_of_two(t->size - 1)) << 1; 724 724 725 725 /* First we figure out where the first key in each cacheline is */ 726 - eytzinger1_for_each(j, t->size) { 726 + eytzinger1_for_each(j, t->size - 1) { 727 727 while (bkey_to_cacheline(b, t, k) < cacheline) 728 728 prev = k, k = bkey_next(k); 729 729 ··· 755 755 } 756 756 757 757 /* Then we build the tree */ 758 - eytzinger1_for_each(j, t->size) 758 + eytzinger1_for_each(j, t->size - 1) 759 759 make_bfloat(b, t, j, 760 760 bkey_to_packed(&min_key), 761 761 bkey_to_packed(&max_key)); ··· 857 857 do { 858 858 p = j ? tree_to_bkey(b, t, 859 859 __inorder_to_eytzinger1(j--, 860 - t->size, t->extra)) 860 + t->size - 1, t->extra)) 861 861 : btree_bkey_first(b, t); 862 862 } while (p >= k); 863 863 break; ··· 1137 1137 n = n * 2 + (cmp < 0); 1138 1138 } while (n < t->size); 1139 1139 1140 - inorder = __eytzinger1_to_inorder(n >> 1, t->size, t->extra); 1140 + inorder = __eytzinger1_to_inorder(n >> 1, t->size - 1, t->extra); 1141 1141 1142 1142 /* 1143 1143 * n would have been the node we recursed to - the low bit tells us if ··· 1148 1148 if (unlikely(!inorder)) 1149 1149 return btree_bkey_first(b, t); 1150 1150 1151 - f = &base->f[eytzinger1_prev(n >> 1, t->size)]; 1151 + f = &base->f[eytzinger1_prev(n >> 1, t->size - 1)]; 1152 1152 } 1153 1153 1154 1154 return cacheline_to_bkey(b, t, inorder, f->key_offset); ··· 1565 1565 if (!inorder || inorder >= t->size) 1566 1566 return; 1567 1567 1568 - j = __inorder_to_eytzinger1(inorder, t->size, t->extra); 1568 + j = __inorder_to_eytzinger1(inorder, t->size - 1, t->extra); 1569 1569 if (k != tree_to_bkey(b, t, j)) 1570 1570 return; 1571 1571
+22 -26
fs/bcachefs/eytzinger.h
··· 17 17 * 18 18 * With one based indexing each level of the tree starts at a power of two - 19 19 * good for cacheline alignment: 20 - * 21 - * Size parameter is treated as if we were using 0 based indexing, however: 22 - * valid nodes, and inorder indices, are in the range [1..size) - that is, there 23 - * are actually size - 1 elements 24 20 */ 25 21 26 22 static inline unsigned eytzinger1_child(unsigned i, unsigned child) ··· 38 42 39 43 static inline unsigned eytzinger1_first(unsigned size) 40 44 { 41 - return rounddown_pow_of_two(size - 1); 45 + return rounddown_pow_of_two(size); 42 46 } 43 47 44 48 static inline unsigned eytzinger1_last(unsigned size) 45 49 { 46 - return rounddown_pow_of_two(size) - 1; 50 + return rounddown_pow_of_two(size + 1) - 1; 47 51 } 48 52 49 53 /* ··· 58 62 59 63 static inline unsigned eytzinger1_next(unsigned i, unsigned size) 60 64 { 61 - EBUG_ON(i >= size); 65 + EBUG_ON(i > size); 62 66 63 - if (eytzinger1_right_child(i) < size) { 67 + if (eytzinger1_right_child(i) <= size) { 64 68 i = eytzinger1_right_child(i); 65 69 66 - i <<= __fls(size) - __fls(i); 67 - i >>= i >= size; 70 + i <<= __fls(size + 1) - __fls(i); 71 + i >>= i > size; 68 72 } else { 69 73 i >>= ffz(i) + 1; 70 74 } ··· 74 78 75 79 static inline unsigned eytzinger1_prev(unsigned i, unsigned size) 76 80 { 77 - EBUG_ON(i >= size); 81 + EBUG_ON(i > size); 78 82 79 - if (eytzinger1_left_child(i) < size) { 83 + if (eytzinger1_left_child(i) <= size) { 80 84 i = eytzinger1_left_child(i) + 1; 81 85 82 - i <<= __fls(size) - __fls(i); 86 + i <<= __fls(size + 1) - __fls(i); 83 87 i -= 1; 84 - i >>= i >= size; 88 + i >>= i > size; 85 89 } else { 86 90 i >>= __ffs(i) + 1; 87 91 } ··· 91 95 92 96 static inline unsigned eytzinger1_extra(unsigned size) 93 97 { 94 - return (size - rounddown_pow_of_two(size - 1)) << 1; 98 + return (size + 1 - rounddown_pow_of_two(size)) << 1; 95 99 } 96 100 97 101 static inline unsigned __eytzinger1_to_inorder(unsigned i, unsigned size, 98 102 
unsigned extra) 99 103 { 100 104 unsigned b = __fls(i); 101 - unsigned shift = __fls(size - 1) - b; 105 + unsigned shift = __fls(size) - b; 102 106 int s; 103 107 104 - EBUG_ON(!i || i >= size); 108 + EBUG_ON(!i || i > size); 105 109 106 110 i ^= 1U << b; 107 111 i <<= 1; ··· 126 130 unsigned shift; 127 131 int s; 128 132 129 - EBUG_ON(!i || i >= size); 133 + EBUG_ON(!i || i > size); 130 134 131 135 /* 132 136 * sign bit trick: ··· 140 144 shift = __ffs(i); 141 145 142 146 i >>= shift + 1; 143 - i |= 1U << (__fls(size - 1) - shift); 147 + i |= 1U << (__fls(size) - shift); 144 148 145 149 return i; 146 150 } ··· 181 185 182 186 static inline unsigned eytzinger0_first(unsigned size) 183 187 { 184 - return eytzinger1_first(size + 1) - 1; 188 + return eytzinger1_first(size) - 1; 185 189 } 186 190 187 191 static inline unsigned eytzinger0_last(unsigned size) 188 192 { 189 - return eytzinger1_last(size + 1) - 1; 193 + return eytzinger1_last(size) - 1; 190 194 } 191 195 192 196 static inline unsigned eytzinger0_next(unsigned i, unsigned size) 193 197 { 194 - return eytzinger1_next(i + 1, size + 1) - 1; 198 + return eytzinger1_next(i + 1, size) - 1; 195 199 } 196 200 197 201 static inline unsigned eytzinger0_prev(unsigned i, unsigned size) 198 202 { 199 - return eytzinger1_prev(i + 1, size + 1) - 1; 203 + return eytzinger1_prev(i + 1, size) - 1; 200 204 } 201 205 202 206 static inline unsigned eytzinger0_extra(unsigned size) 203 207 { 204 - return eytzinger1_extra(size + 1); 208 + return eytzinger1_extra(size); 205 209 } 206 210 207 211 static inline unsigned __eytzinger0_to_inorder(unsigned i, unsigned size, 208 212 unsigned extra) 209 213 { 210 - return __eytzinger1_to_inorder(i + 1, size + 1, extra) - 1; 214 + return __eytzinger1_to_inorder(i + 1, size, extra) - 1; 211 215 } 212 216 213 217 static inline unsigned __inorder_to_eytzinger0(unsigned i, unsigned size, 214 218 unsigned extra) 215 219 { 216 - return __inorder_to_eytzinger1(i + 1, size + 1, extra) - 1; 220 + 
return __inorder_to_eytzinger1(i + 1, size, extra) - 1; 217 221 } 218 222 219 223 static inline unsigned eytzinger0_to_inorder(unsigned i, unsigned size)