        lock_set_subclass(&b->lock.dep_map, level + 1, _THIS_IP_);
        b->level        = level;
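+       /* poisoned; btree()/btree_root() set the real parent (NULL for root) */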
+       b->parent       = (void *) ~0UL;
 
        mca_reinit(b);
 
 
 static int btree_split(struct btree *b, struct btree_op *op)
 {
-       bool split, root = b == b->c->root;
+       bool split;
        struct btree *n1, *n2 = NULL, *n3 = NULL;
        uint64_t start_time = local_clock();
 
                if (IS_ERR(n2))
                        goto err_free1;
 
-               if (root) {
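+               /* no parent means b is the root: the split adds a level */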
+               if (!b->parent) {
                        n3 = bch_btree_node_alloc(b->c, b->level + 1, &op->cl);
                        if (IS_ERR(n3))
                                goto err_free2;
 
                bch_btree_insert_keys(n1, op);
 
-               /* Has to be a linear search because we don't have an auxiliary
+               /*
+                * Has to be a linear search because we don't have an auxiliary
                 * search tree yet
                 */
 
        bch_btree_node_write(n1, &op->cl);
 
        if (n3) {
+               /* Depth increases, make a new root */
+
                bkey_copy_key(&n3->key, &MAX_KEY);
                bch_btree_insert_keys(n3, op);
                bch_btree_node_write(n3, &op->cl);
                closure_sync(&op->cl);
                bch_btree_set_root(n3);
                rw_unlock(true, n3);
-       } else if (root) {
+       } else if (!b->parent) {
+               /* Root filled up but didn't need to be split */
+
                op->keys.top = op->keys.bottom;
                closure_sync(&op->cl);
                bch_btree_set_root(n1);
 
        unsigned long           seq;
        struct rw_semaphore     lock;
        struct cache_set        *c;
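+       /* NULL if this node is the root; set on descent by btree()/btree_root() */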
+       struct btree            *parent;
 
        unsigned long           flags;
        uint16_t                written;        /* would be nice to kill */
 ({                                                                     \
        int _r, l = (b)->level - 1;                                     \
        bool _w = l <= (op)->lock;                                      \
-       struct btree *_b = bch_btree_node_get((b)->c, key, l, op);      \
-       if (!IS_ERR(_b)) {                                              \
-               _r = bch_btree_ ## fn(_b, op, ##__VA_ARGS__);           \
-               rw_unlock(_w, _b);                                      \
+       struct btree *_child = bch_btree_node_get((b)->c, key, l, op);  \
+       if (!IS_ERR(_child)) {                                          \
+               _child->parent = (b);                                   \
+               _r = bch_btree_ ## fn(_child, op, ##__VA_ARGS__);       \
+               rw_unlock(_w, _child);                                  \
        } else                                                          \
-               _r = PTR_ERR(_b);                                       \
+               _r = PTR_ERR(_child);                                   \
        _r;                                                             \
 })
 
                bool _w = insert_lock(op, _b);                          \
                rw_lock(_w, _b, _b->level);                             \
                if (_b == (c)->root &&                                  \
-                   _w == insert_lock(op, _b))                          \
+                   _w == insert_lock(op, _b)) {                        \
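+                       /* the root is the only node with no parent */   \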
+                       _b->parent = NULL;                              \
                        _r = bch_btree_ ## fn(_b, op, ##__VA_ARGS__);   \
+               }                                                       \
                rw_unlock(_w, _b);                                      \
                bch_cannibalize_unlock(c, &(op)->cl);                   \
        } while (_r == -EINTR);                                         \