Commit 72492d55 authored by Kent Overstreet, committed by Kent Overstreet

bcachefs: Make eytzinger size parameter more conventional

Signed-off-by: Kent Overstreet <kent.overstreet@gmail.com>
parent 17563164
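This is a calling-convention cleanup: previously the eytzinger1 helpers treated the size parameter as one past the last valid node (valid nodes and inorder indices in [1..size), i.e. size - 1 elements); with this commit, size is simply the number of elements and valid nodes are [1..size]. A minimal sketch of what changes for callers, not part of the commit itself (nr and visit() are hypothetical names standing in for an element count and a loop body):

	/* sketch only: nr = number of elements, j = node index, visit() = hypothetical work */
	unsigned j;

	/* Before: pass one past the element count; nodes 1..nr are valid */
	eytzinger1_for_each(j, nr + 1)
		visit(j);
	/* eytzinger1_first(nr + 1) == rounddown_pow_of_two(nr) */

	/* After: pass the element count itself; the same nodes 1..nr are valid */
	eytzinger1_for_each(j, nr)
		visit(j);
	/* eytzinger1_first(nr) == rounddown_pow_of_two(nr), same value as before */

This is why the bset.c call sites below change from t->size to t->size - 1: t->size is one more than the number of nodes in the auxiliary search tree, so under the new convention the element count passed to the helpers is t->size - 1.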
@@ -461,7 +461,7 @@ static inline struct bkey_packed *tree_to_bkey(const struct btree *b,
 					unsigned j)
 {
 	return cacheline_to_bkey(b, t,
-			__eytzinger1_to_inorder(j, t->size, t->extra),
+			__eytzinger1_to_inorder(j, t->size - 1, t->extra),
 			bkey_float(b, t, j)->key_offset);
 }
@@ -723,7 +723,7 @@ static noinline void __build_ro_aux_tree(struct btree *b, struct bset_tree *t)
 	t->extra = (t->size - rounddown_pow_of_two(t->size - 1)) << 1;
 	/* First we figure out where the first key in each cacheline is */
-	eytzinger1_for_each(j, t->size) {
+	eytzinger1_for_each(j, t->size - 1) {
 		while (bkey_to_cacheline(b, t, k) < cacheline)
 			prev = k, k = bkey_next(k);
@@ -755,7 +755,7 @@ static noinline void __build_ro_aux_tree(struct btree *b, struct bset_tree *t)
 	}
 	/* Then we build the tree */
-	eytzinger1_for_each(j, t->size)
+	eytzinger1_for_each(j, t->size - 1)
 		make_bfloat(b, t, j,
 			    bkey_to_packed(&min_key),
 			    bkey_to_packed(&max_key));
@@ -857,7 +857,7 @@ static struct bkey_packed *__bkey_prev(struct btree *b, struct bset_tree *t,
 		do {
 			p = j ? tree_to_bkey(b, t,
 					__inorder_to_eytzinger1(j--,
-							t->size, t->extra))
+							t->size - 1, t->extra))
 			      : btree_bkey_first(b, t);
 		} while (p >= k);
 		break;
@@ -1137,7 +1137,7 @@ static struct bkey_packed *bset_search_tree(const struct btree *b,
 		n = n * 2 + (cmp < 0);
 	} while (n < t->size);
-	inorder = __eytzinger1_to_inorder(n >> 1, t->size, t->extra);
+	inorder = __eytzinger1_to_inorder(n >> 1, t->size - 1, t->extra);
 	/*
 	 * n would have been the node we recursed to - the low bit tells us if
@@ -1148,7 +1148,7 @@ static struct bkey_packed *bset_search_tree(const struct btree *b,
 		if (unlikely(!inorder))
 			return btree_bkey_first(b, t);
-		f = &base->f[eytzinger1_prev(n >> 1, t->size)];
+		f = &base->f[eytzinger1_prev(n >> 1, t->size - 1)];
 	}
 	return cacheline_to_bkey(b, t, inorder, f->key_offset);
@@ -1565,7 +1565,7 @@ void bch2_bfloat_to_text(struct printbuf *out, struct btree *b,
 	if (!inorder || inorder >= t->size)
 		return;
-	j = __inorder_to_eytzinger1(inorder, t->size, t->extra);
+	j = __inorder_to_eytzinger1(inorder, t->size - 1, t->extra);
 	if (k != tree_to_bkey(b, t, j))
 		return;
...
@@ -17,10 +17,6 @@
  *
  * With one based indexing each level of the tree starts at a power of two -
  * good for cacheline alignment:
- *
- * Size parameter is treated as if we were using 0 based indexing, however:
- * valid nodes, and inorder indices, are in the range [1..size) - that is, there
- * are actually size - 1 elements
  */
 static inline unsigned eytzinger1_child(unsigned i, unsigned child)
@@ -42,12 +38,12 @@ static inline unsigned eytzinger1_right_child(unsigned i)
 static inline unsigned eytzinger1_first(unsigned size)
 {
-	return rounddown_pow_of_two(size - 1);
+	return rounddown_pow_of_two(size);
 }
 static inline unsigned eytzinger1_last(unsigned size)
 {
-	return rounddown_pow_of_two(size) - 1;
+	return rounddown_pow_of_two(size + 1) - 1;
 }
 /*
@@ -62,13 +58,13 @@ static inline unsigned eytzinger1_last(unsigned size)
 static inline unsigned eytzinger1_next(unsigned i, unsigned size)
 {
-	EBUG_ON(i >= size);
+	EBUG_ON(i > size);
-	if (eytzinger1_right_child(i) < size) {
+	if (eytzinger1_right_child(i) <= size) {
 		i = eytzinger1_right_child(i);
-		i <<= __fls(size) - __fls(i);
-		i >>= i >= size;
+		i <<= __fls(size + 1) - __fls(i);
+		i >>= i > size;
 	} else {
 		i >>= ffz(i) + 1;
 	}
@@ -78,14 +74,14 @@ static inline unsigned eytzinger1_next(unsigned i, unsigned size)
 static inline unsigned eytzinger1_prev(unsigned i, unsigned size)
 {
-	EBUG_ON(i >= size);
+	EBUG_ON(i > size);
-	if (eytzinger1_left_child(i) < size) {
+	if (eytzinger1_left_child(i) <= size) {
 		i = eytzinger1_left_child(i) + 1;
-		i <<= __fls(size) - __fls(i);
+		i <<= __fls(size + 1) - __fls(i);
 		i -= 1;
-		i >>= i >= size;
+		i >>= i > size;
 	} else {
 		i >>= __ffs(i) + 1;
 	}
@@ -95,17 +91,17 @@ static inline unsigned eytzinger1_prev(unsigned i, unsigned size)
 static inline unsigned eytzinger1_extra(unsigned size)
 {
-	return (size - rounddown_pow_of_two(size - 1)) << 1;
+	return (size + 1 - rounddown_pow_of_two(size)) << 1;
 }
 static inline unsigned __eytzinger1_to_inorder(unsigned i, unsigned size,
 					       unsigned extra)
 {
 	unsigned b = __fls(i);
-	unsigned shift = __fls(size - 1) - b;
+	unsigned shift = __fls(size) - b;
 	int s;
-	EBUG_ON(!i || i >= size);
+	EBUG_ON(!i || i > size);
 	i ^= 1U << b;
 	i <<= 1;
@@ -130,7 +126,7 @@ static inline unsigned __inorder_to_eytzinger1(unsigned i, unsigned size,
 	unsigned shift;
 	int s;
-	EBUG_ON(!i || i >= size);
+	EBUG_ON(!i || i > size);
 	/*
 	 * sign bit trick:
@@ -144,7 +140,7 @@ static inline unsigned __inorder_to_eytzinger1(unsigned i, unsigned size,
 	shift = __ffs(i);
 	i >>= shift + 1;
-	i |= 1U << (__fls(size - 1) - shift);
+	i |= 1U << (__fls(size) - shift);
 	return i;
 }
@@ -185,39 +181,39 @@ static inline unsigned eytzinger0_right_child(unsigned i)
 static inline unsigned eytzinger0_first(unsigned size)
 {
-	return eytzinger1_first(size + 1) - 1;
+	return eytzinger1_first(size) - 1;
 }
 static inline unsigned eytzinger0_last(unsigned size)
 {
-	return eytzinger1_last(size + 1) - 1;
+	return eytzinger1_last(size) - 1;
 }
 static inline unsigned eytzinger0_next(unsigned i, unsigned size)
 {
-	return eytzinger1_next(i + 1, size + 1) - 1;
+	return eytzinger1_next(i + 1, size) - 1;
 }
 static inline unsigned eytzinger0_prev(unsigned i, unsigned size)
 {
-	return eytzinger1_prev(i + 1, size + 1) - 1;
+	return eytzinger1_prev(i + 1, size) - 1;
 }
 static inline unsigned eytzinger0_extra(unsigned size)
 {
-	return eytzinger1_extra(size + 1);
+	return eytzinger1_extra(size);
 }
 static inline unsigned __eytzinger0_to_inorder(unsigned i, unsigned size,
 					       unsigned extra)
 {
-	return __eytzinger1_to_inorder(i + 1, size + 1, extra) - 1;
+	return __eytzinger1_to_inorder(i + 1, size, extra) - 1;
 }
 static inline unsigned __inorder_to_eytzinger0(unsigned i, unsigned size,
 					       unsigned extra)
 {
-	return __inorder_to_eytzinger1(i + 1, size + 1, extra) - 1;
+	return __inorder_to_eytzinger1(i + 1, size, extra) - 1;
 }
 static inline unsigned eytzinger0_to_inorder(unsigned i, unsigned size)
...
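As a sanity check on the new convention (not part of the commit), a worked example for a tree of 7 elements, where nodes 1..7 are valid:

	eytzinger1_first(7)	/* rounddown_pow_of_two(7)     = 4, the leftmost leaf  */
	eytzinger1_last(7)	/* rounddown_pow_of_two(8) - 1 = 7, the rightmost leaf */
	eytzinger1_extra(7)	/* (7 + 1 - 4) << 1            = 8                      */
	eytzinger0_first(7)	/* eytzinger1_first(7) - 1     = 3, 0-based wrapper     */

Under the old convention the eytzinger1 helpers would have been called with size = 8 for the same tree and produce the same values; the eytzinger0 wrappers, whose size parameter was already the element count, simply stop adding 1 before forwarding it. That equivalence is why the bset.c changes above are behaviour-preserving.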