{
struct btree_transaction_stats *s = btree_trans_stats(trans);
struct printbuf buf = PRINTBUF;
- size_t nr = bitmap_weight(trans->paths_allocated, BTREE_ITER_MAX);
+ size_t nr = bitmap_weight(trans->paths_allocated, trans->nr_paths);
if (!s)
return;
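
Throughout the patch, bitmap_weight() calls that counted allocated paths against the compile-time BTREE_ITER_MAX now count against the per-transaction trans->nr_paths. A minimal userspace sketch of the semantics being relied on (an approximation, not the kernel helper, which works a word at a time):

#include <limits.h>

/* Sketch of bitmap_weight(bitmap, nbits): the number of set bits among
 * the first nbits bits of the bitmap. */
static unsigned bitmap_weight_sketch(const unsigned long *bitmap, unsigned nbits)
{
	unsigned long bits_per_long = CHAR_BIT * sizeof(unsigned long);
	unsigned nr = 0;

	for (unsigned i = 0; i < nbits; i++)
		if (bitmap[i / bits_per_long] & (1UL << (i % bits_per_long)))
			nr++;
	return nr;
}
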
static inline btree_path_idx_t btree_path_alloc(struct btree_trans *trans,
btree_path_idx_t pos)
{
- btree_path_idx_t idx = find_first_zero_bit(trans->paths_allocated, BTREE_ITER_MAX);
+ btree_path_idx_t idx = find_first_zero_bit(trans->paths_allocated, trans->nr_paths);
- if (unlikely(idx == BTREE_ITER_MAX))
+ if (unlikely(idx == trans->nr_paths))
btree_path_overflow(trans);
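
btree_path_alloc() relies on find_first_zero_bit()'s convention of returning its size argument when every bit is set, which is why idx == trans->nr_paths is the overflow case. A minimal model of that convention (userspace sketch, not the kernel implementation):

#include <limits.h>

/* Sketch of find_first_zero_bit(bitmap, nbits): index of the first clear
 * bit, or nbits itself if all bits are set -- the overflow case above. */
static unsigned find_first_zero_bit_sketch(const unsigned long *bitmap,
					   unsigned nbits)
{
	unsigned long bits_per_long = CHAR_BIT * sizeof(unsigned long);

	for (unsigned i = 0; i < nbits; i++)
		if (!(bitmap[i / bits_per_long] & (1UL << (i % bits_per_long))))
			return i;
	return nbits;
}
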
struct btree_path *path;
unsigned i;
- BUG_ON(trans->nr_sorted != bitmap_weight(trans->paths_allocated, BTREE_ITER_MAX) - 1);
+ BUG_ON(trans->nr_sorted != bitmap_weight(trans->paths_allocated, trans->nr_paths) - 1);
trans_for_each_path(trans, path, i) {
BUG_ON(path->sorted_idx >= trans->nr_sorted);
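
The subtraction of 1 in the assertion reflects that path index 0 is reserved as a sentinel elsewhere in this series: its bit stays set in paths_allocated but it never appears in the sorted array, so the sorted count runs one behind the bitmap weight. (The reservation itself is not visible in this hunk; this is an inference from the surrounding code.)
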
trans->journal_replay_not_finished =
unlikely(!test_bit(JOURNAL_REPLAY_DONE, &c->journal.flags)) &&
atomic_inc_not_zero(&c->journal_keys.ref);
+ trans->nr_paths = ARRAY_SIZE(trans->_paths);
trans->paths_allocated = trans->_paths_allocated;
trans->sorted = trans->_sorted;
trans->paths = trans->_paths;
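
trans->nr_paths is seeded from the size of the inline array, so bounds that used to be the constant BTREE_ITER_MAX now follow whatever backing storage the transaction currently has. A sketch of the relationship this sets up (hypothetical field types, purely illustrative):

#include <stddef.h>

#define NR_INLINE_PATHS 64	/* stand-in for the old BTREE_ITER_MAX */

struct trans_sketch {
	unsigned	 nr_paths;			/* runtime capacity */
	int		*paths;				/* what we iterate over */
	int		 _paths[NR_INLINE_PATHS];	/* inline backing storage */
};

static void trans_init_sketch(struct trans_sketch *trans)
{
	/* Capacity tracks whatever array paths points at, so a later
	 * reallocation only needs to update these two fields together. */
	trans->nr_paths = sizeof(trans->_paths) / sizeof(trans->_paths[0]);
	trans->paths	= trans->_paths;
}

Presumably this is groundwork for growing the path table at runtime; nothing in this hunk does so yet.
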
static inline struct btree_path *
__trans_next_path(struct btree_trans *trans, unsigned *idx)
{
- *idx = find_next_bit(trans->paths_allocated, BTREE_ITER_MAX, *idx);
+ unsigned long *w = trans->paths_allocated + *idx / BITS_PER_LONG;
+ /*
+ * Open coded find_next_bit(), because
+ * - this is the fast path; we can't afford the function call
+ * - and we know that nr_paths is a multiple of BITS_PER_LONG
+ */
+ while (*idx < trans->nr_paths) {
+ unsigned long v = *w >> (*idx & (BITS_PER_LONG - 1));
+ if (v) {
+ *idx += __ffs(v);
+ return trans->paths + *idx;
+ }
+
+ *idx += BITS_PER_LONG;
+ *idx &= ~(BITS_PER_LONG - 1);
+ w++;
+ }
- return *idx < BTREE_ITER_MAX ? &trans->paths[*idx] : NULL;
+ return NULL;
}
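
The scan above skips the final-word masking a general find_next_bit() needs, which is only safe because nr_paths is kept a multiple of BITS_PER_LONG. Below is a standalone, runnable model of the same loop (illustrative names; the kernel's __ffs() is stood in for by __builtin_ctzl()):

#include <stdio.h>
#include <limits.h>

#define WORD_BITS (CHAR_BIT * sizeof(unsigned long))

/* Find the next set bit at or after idx. nbits must be a multiple of
 * WORD_BITS, since the last word is never masked -- the same assumption
 * the kernel comment documents. Returns -1 if no set bit remains. */
static int next_set_bit(const unsigned long *bitmap, unsigned nbits, unsigned idx)
{
	const unsigned long *w = bitmap + idx / WORD_BITS;

	while (idx < nbits) {
		unsigned long v = *w >> (idx & (WORD_BITS - 1));
		if (v)
			return idx + __builtin_ctzl(v);	/* __ffs() equivalent */

		idx += WORD_BITS;		/* these two steps land idx */
		idx &= ~(WORD_BITS - 1);	/* at the next word boundary */
		w++;
	}
	return -1;
}

int main(void)
{
	unsigned long bitmap[2] = { (1UL << 0) | (1UL << 3), 1UL << 2 };

	for (int i = 0; (i = next_set_bit(bitmap, 2 * WORD_BITS, i)) >= 0; i++)
		printf("bit %d is set\n", i);	/* 0, 3, then WORD_BITS + 2 */
	return 0;
}
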
static inline int btree_trans_too_many_iters(struct btree_trans *trans)
{
- if (bitmap_weight(trans->paths_allocated, BTREE_ITER_MAX) > BTREE_ITER_MAX - 8)
+ if (bitmap_weight(trans->paths_allocated, trans->nr_paths) > BTREE_ITER_MAX - 8)
return __bch2_btree_trans_too_many_iters(trans);
return 0;
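
The right-hand side of the comparison stays the compile-time constant: with nr_paths initialized to ARRAY_SIZE(trans->_paths), the two values are equal at this point in the series. The subtraction keeps eight slots of headroom, so a transaction that is nearly out of paths restarts before allocation actually fails (the headroom rationale is an inference; the hunk only changes the bitmap_weight() bound).
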
unsigned mem_bytes;
btree_path_idx_t nr_sorted;
+ btree_path_idx_t nr_paths;
btree_path_idx_t nr_paths_max;
u8 fn_idx;
u8 nr_updates;
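
The new counter sits beside nr_sorted and nr_paths_max with the same narrow btree_path_idx_t index type, so recording the capacity adds essentially nothing to the size of the hot transaction struct.
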
struct btree_path *path = trans->paths + path_idx;
EBUG_ON(!path->should_be_locked);
- EBUG_ON(trans->nr_updates >= BTREE_ITER_MAX);
+ EBUG_ON(trans->nr_updates >= trans->nr_paths);
EBUG_ON(!bpos_eq(k->k.p, path->pos));
n = (struct btree_insert_entry) {
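
Since each btree_insert_entry references a path, the number of pending updates is naturally bounded by the number of path slots; pointing the assertion at trans->nr_paths keeps it accurate once that capacity is no longer the compile-time constant.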