return ma_is_root(mte_to_node(node));
}
+/*
+ * mas_is_root_limits() - Check if the maple state spans the full addressable
+ * range [0, ULONG_MAX], i.e. the limits of a root node.
+ * @mas: The maple state to check.
+ *
+ * Return: true if @mas covers the entire range, false otherwise.
+ */
+static inline bool mas_is_root_limits(const struct ma_state *mas)
+{
+ return !mas->min && mas->max == ULONG_MAX;
+}
+
static inline bool mt_is_alloc(struct maple_tree *mt)
{
return (mt->ma_flags & MAPLE_ALLOC_RANGE);
static inline bool mast_new_root(struct maple_subtree_state *mast,
struct ma_state *mas)
{
- if (mast->l->min || mast->l->max != ULONG_MAX)
+ if (!mas_is_root_limits(mast->l))
return false;
mas_mn(mast->l)->parent =
if (mast_sufficient(mast))
continue;
+ if (mas_is_root_limits(mast->orig_l)) // new root without a node.
+ break;
+
// Try to get enough data for the next iteration.
if (!mast_rebalance_from_siblings(mast))
if (!mast_rebalance_from_cousins(mast))
if (right)
mte_set_parent(right, l_mas.node, ++slot);
- if (!mast->bn->b_end) {
+ if (mas_is_root_limits(mast->orig_l)) {
mas_mn(&l_mas)->parent =
ma_parent_ptr(((unsigned long)mas->tree | MA_ROOT_PARENT));
} else {
mas_mn(&l_mas)->parent = mas_mn(mast->orig_l)->parent;
}
- mat_add(&free, mast->orig_l->node);
+ if (!mte_dead_node(mast->orig_l->node))
+ mat_add(&free, mast->orig_l->node);
+
mas_dup_state(mast->orig_l, &l_mas);
mas->depth = mast->orig_l->depth;
/* Node rebalancing may occur due to this store, so there may be two new
* entries per level plus a new root.
*/
- node_cnt += 1 + mas->tree->ma_height * 2;
+ node_cnt += 1 + mas->tree->ma_height * 3;
mas_node_cnt(mas, node_cnt);
if (mas_is_err(mas))
return 0;
mast.orig_l = &l_mas;
mast.orig_r = &r_mas;
- // FIXME: Is this needed?
-#if 0
- mas_dup_state(&l_mas, mas);
- mas->last = mas->index;
- mas_node_walk(mas, mte_node_type(mas->node), &range_min, &range_max);
- mas->index = mas->last = l_mas.last;
- mas_node_walk(mas, mte_node_type(mas->node), &range_min, &range_max);
- mas_dup_state(mas, &l_mas);
-#endif
-
-
// Set up right side.
mas_dup_state(&r_mas, mas);
r_mas.depth = mas->depth;
118, 128,
};
+ unsigned long overflow[] = {
+ 1317,
+ 1321,
+ 1351,
+ 1352,
+ 1365,
+ };
+
MT_BUG_ON(mt, !mtree_empty(mt));
check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
check_store_range(mt, 1792, 1799, NULL, 0);
mt_validate(mt);
mtree_destroy(mt);
+
+ mtree_init(mt, MAPLE_ALLOC_RANGE);
+ for (i = 0; i <= 200; i++) {
+ val = i*10;
+ val2 = (i+1)*10;
+ check_store_range(mt, val, val2, xa_mk_value(val), 0);
+ }
+
+ for (i = 10; i <= 19; i++) {
+ val = i*100 + 5;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val++;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val += 10;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+
+ val += 39;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val++;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val += 10;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ }
+
+ for (i = 13; i <= 14; i++) {
+ val = i*100 + 75;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val++;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val += 9;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val++;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val += 9;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val++;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ }
+ for (i = 0; i <= 3; i++) {
+ val = 1200 + i*10;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val++;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ }
+ for (i = 0; i <= 5; i++) {
+ val = 1270 + i;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val+= 10;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ }
+ for (i = 0; i <= 6; i++) {
+ val = 1400 + i*10;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val++;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ }
+
+ for (i = 0; i <= 5; i++) {
+ val = 1370 + i;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val+= 10;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ val+= 10;
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ }
+ for (i = 0; i < ARRAY_SIZE(overflow); i++) {
+ val = overflow[i];
+ check_store_range(mt, val, val, xa_mk_value(val), 0);
+ }
+
+
+ // Cause a 3 child split all the way up the tree.
+ check_store_range(mt, 1349, 1350, NULL, 0);
+ mt_validate(mt);
+ mtree_destroy(mt);
}
static noinline void check_next_entry(struct maple_tree *mt)