	if (IS_ENABLED(__KERNEL__)) {
		trans = this_cpu_xchg(c->btree_trans_bufs->trans, NULL);
		if (trans) {
-			memset(trans, 0, offsetof(struct btree_trans, updates));
+			memset(trans, 0, offsetof(struct btree_trans, list));
			goto got_trans;
		}
	}
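The hunk above reuses a transaction from the per-CPU buffer and clears only the fields laid out before the chosen boundary member; with the fixed `updates[]` array removed, `list` replaces `updates` as the boundary of the partial memset(). Below is a minimal standalone sketch of that offsetof()-based partial-zeroing pattern, with made-up field names (it is not bcachefs code):

/*
 * Minimal sketch of the offsetof()-based partial-zeroing pattern
 * (illustrative field names, not bcachefs code): everything declared
 * before the chosen boundary member is cleared, everything from the
 * boundary onwards survives reuse.
 */
#include <stddef.h>
#include <stdio.h>
#include <string.h>

struct reusable {
	int scratch_a;			/* reset on every reuse */
	int scratch_b;			/* reset on every reuse */
	/* entries before this are zeroed out on every reuse */
	int persistent;			/* preserved across reuse */
};

static void reuse(struct reusable *r)
{
	/* zero everything up to, but not including, 'persistent' */
	memset(r, 0, offsetof(struct reusable, persistent));
}

int main(void)
{
	struct reusable r = { .scratch_a = 1, .scratch_b = 2, .persistent = 3 };

	reuse(&r);
	printf("%d %d %d\n", r.scratch_a, r.scratch_b, r.persistent);	/* 0 0 3 */
	return 0;
}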
	trans->paths_allocated = trans->_paths_allocated;
	trans->sorted = trans->_sorted;
	trans->paths = trans->_paths;
+	trans->updates = trans->_updates;
	*trans_paths_nr(trans->paths) = BTREE_ITER_MAX;
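For a freshly allocated transaction, the new `updates` pointer is initialized to the inline `_updates` array, mirroring `paths_allocated`, `sorted`, and `paths`; routing all users through the pointer presumably allows it to be repointed at a larger allocation later, as was done for the paths. A rough standalone sketch of that pointer-plus-inline-backing-array pattern, with assumed names and a hypothetical grow helper (not the bcachefs implementation):

/*
 * Rough sketch of the pointer-plus-inline-backing-array pattern
 * (assumed names and a hypothetical grow helper, not the bcachefs
 * implementation): users always go through the pointer, which starts
 * out targeting a small embedded array and can later be redirected to
 * a larger heap allocation.
 */
#include <stdlib.h>
#include <string.h>

#define INLINE_NR	8

struct entry { int v; };

struct trans_like {
	struct entry	*updates;		/* always accessed via this pointer */
	unsigned	nr_updates;
	unsigned	updates_max;
	struct entry	_updates[INLINE_NR];	/* inline backing storage */
};

static void trans_like_init(struct trans_like *t)
{
	t->updates	= t->_updates;
	t->nr_updates	= 0;
	t->updates_max	= INLINE_NR;
}

/* hypothetical grow path: copy into a heap buffer and repoint */
static int trans_like_grow(struct trans_like *t, unsigned new_max)
{
	struct entry *n = malloc(new_max * sizeof(*n));

	if (!n)
		return -1;
	memcpy(n, t->updates, t->nr_updates * sizeof(*n));
	if (t->updates != t->_updates)
		free(t->updates);
	t->updates	= n;
	t->updates_max	= new_max;
	return 0;
}

int main(void)
{
	struct trans_like t;

	trans_like_init(&t);
	t.updates[t.nr_updates++] = (struct entry) { .v = 1 };

	if (!trans_like_grow(&t, 64))
		t.updates[t.nr_updates++] = (struct entry) { .v = 2 };

	if (t.updates != t._updates)
		free(t.updates);
	return 0;
}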
	unsigned long *paths_allocated;
	struct btree_path *paths;
	u8 *sorted;
+	struct btree_insert_entry *updates;
	void *mem;
	unsigned mem_top;
	/* Entries before this are zeroed out on every bch2_trans_get() call */
-	struct btree_insert_entry updates[BTREE_ITER_MAX];
-
	struct list_head list;
	struct closure ref;
	struct btree_trans_paths trans_paths;
	struct btree_path _paths[BTREE_ITER_MAX];
	u8 _sorted[BTREE_ITER_MAX + 8];
+	struct btree_insert_entry _updates[BTREE_ITER_MAX];
};
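Note that the backing arrays (`_paths`, `_sorted`, and the new `_updates`) are declared after `list`, so they fall outside the prefix cleared by the memset() above, while the small pointer fields stay inside it. A standalone sketch (illustrative struct, not the real btree_trans layout) of how offsetof()-based compile-time asserts can document such a zeroed/preserved boundary:

/*
 * Sketch (illustrative struct, not the real btree_trans layout) of using
 * offsetof()-based compile-time asserts to document which fields fall
 * inside the zeroed-on-reuse prefix and which backing storage must sit
 * after the boundary so it is preserved.
 */
#include <assert.h>
#include <stddef.h>

struct layout_demo {
	void *mem;		/* inside the zeroed prefix */
	unsigned mem_top;	/* inside the zeroed prefix */
	int list;		/* boundary: first field preserved across reuse */
	char _buf[16];		/* preserved backing storage */
};

static_assert(offsetof(struct layout_demo, mem_top) < offsetof(struct layout_demo, list),
	      "mem_top must be inside the zeroed prefix");
static_assert(offsetof(struct layout_demo, _buf) >= offsetof(struct layout_demo, list),
	      "backing storage must come after the boundary");

int main(void)
{
	return 0;	/* nothing to run; the checks fire at compile time */
}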
static inline struct btree_path *btree_iter_path(struct btree_trans *trans, struct btree_iter *iter)
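In the hunk below, the debug bound changes because `trans->updates` is now a pointer: the kernel's ARRAY_SIZE() only works on true arrays (its __must_be_array() check breaks the build when given a pointer), so the limit is spelled out as BTREE_ITER_MAX instead. A small standalone sketch of why a naive sizeof-based element count misbehaves on a pointer:

/*
 * Standalone sketch of why a sizeof-based element count only works on
 * true arrays: once 'updates' is a pointer, sizeof(ptr)/sizeof(*ptr) no
 * longer yields the number of entries, so an explicit constant such as
 * BTREE_ITER_MAX has to be used as the bound instead.
 */
#include <stdio.h>

#define ARRAY_SIZE_DEMO(a)	(sizeof(a) / sizeof((a)[0]))

int main(void)
{
	int array[64];
	int *ptr = array;

	printf("%zu\n", ARRAY_SIZE_DEMO(array));	/* 64 */
	printf("%zu\n", ARRAY_SIZE_DEMO(ptr));		/* sizeof(int *) / sizeof(int), not 64 */
	return 0;
}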
	struct btree_path *path = trans->paths + path_idx;
	EBUG_ON(!path->should_be_locked);
-	EBUG_ON(trans->nr_updates >= ARRAY_SIZE(trans->updates));
+	EBUG_ON(trans->nr_updates >= BTREE_ITER_MAX);
	EBUG_ON(!bpos_eq(k->k.p, path->pos));
	n = (struct btree_insert_entry) {