{
mas_mn(mast->l)->parent =
ma_parent_ptr(((unsigned long)mas->tree | MA_ROOT_PARENT));
- if (!mte_is_root(mast->orig_l->node)) {
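+ /* Only ascend and free the old nodes if orig_l is still live and not the root. */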
+ if (!mte_dead_node(mast->orig_l->node) &&
+ !mte_is_root(mast->orig_l->node)) {
do {
mast_ascend_free(mast);
mast_topiary(mast);
} while (!mte_is_root(mast->orig_l->node));
- } else if ((mast->orig_l->node != mas->node) &&
+ }
+ if ((mast->orig_l->node != mas->node) &&
(mast->l->depth > mas_mt_height(mas))) {
mat_add(mast->free, mas->node);
}
-
- mat_add(mast->free, mast->orig_l->node);
}
static inline void mast_cp_to_nodes(struct maple_subtree_state *mast,
struct ma_state *mas, int height)
{
struct maple_enode *ancestor;
- unsigned int mas_height = mas_mt_height(mas);
if (mte_is_root(mas->node)) {
if (mt_is_alloc(mas->tree))
mte_set_parent(mast->l->node, ancestor, mas_offset(mast->l));
mte_set_parent(mast->r->node, ancestor, mas_offset(mast->r));
mte_to_node(ancestor)->parent = mas_mn(mas)->parent;
- // New root requires a new height.
- if (mas->full_cnt >= mas_height)
- mas->depth = mas_height + 1;
mast->l->node = ancestor;
mab_mas_cp(mast->bn, 0, mt_slots[mast->bn->type] - 1, mast->l);
mte_set_rcu_slot(mas->node, slot, entry);
mte_set_pivot(mas->node, slot++, mas->last);
-#if 0
- if (mt_is_alloc(mas->tree)) {
- //FIXME: arch_get_mmap_end? mas->index = TASK_SIZE / PAGE_SIZE - 1;
- unsigned long mmap_end = 0x2000000000000UL;
-
- if (mas->index < mmap_end - 1)
- mte_set_pivot(mas->node, slot++, mmap_end - 1);
- mte_set_rcu_slot(mas->node, slot, XA_ZERO_ENTRY);
- mte_set_pivot(mas->node, slot++, mt_max[type]);
- }
-#endif
/* swap the new root into the tree */
rcu_assign_pointer(mas->tree->ma_root, mte_mk_root(mas->node));
mas->depth = 1;
// count slots
s_entry = mas_next(mas_start, mas_end->last);
- while (!mas_is_none(mas_start)) {
+ while (s_entry) {
BUG_ON(retry > 50); // Stop infinite retries during testing.
if (xa_is_zero(s_entry)) {
retry++;
mtree_destroy(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
- for (i = 0; i <= 200; i++) {
+ for (i = 0; i <= 1590; i++) {
val = i*10;
val2 = (i+1)*10;
check_store_range(mt, val, val2, xa_mk_value(val), 0);
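+ /* The tree is expected to stay under 4 levels while populating. */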
+ BUG_ON(mt_height(mt) >= 4);
}
-
- check_store_range(mt, 1655, 1655, xa_mk_value(1655), 0);
- check_store_range(mt, 1656, 1656, xa_mk_value(1656), 0);
- check_store_range(mt, 1666, 1666, xa_mk_value(1666), 0);
-
- check_store_range(mt, 1705, 1705, xa_mk_value(1705), 0);
- check_store_range(mt, 1706, 1706, xa_mk_value(1706), 0);
- check_store_range(mt, 1716, 1716, xa_mk_value(1716), 0);
-
- check_store_range(mt, 1755, 1755, xa_mk_value(1755), 0);
- check_store_range(mt, 1756, 1756, xa_mk_value(1756), 0);
-
- check_store_range(mt, 1805, 1806, xa_mk_value(1805), 0);
- check_store_range(mt, 1806, 1806, xa_mk_value(1806), 0);
-
- check_store_range(mt, 1855, 1855, xa_mk_value(1855), 0);
- check_store_range(mt, 1856, 1856, xa_mk_value(1856), 0);
- check_store_range(mt, 1866, 1866, xa_mk_value(1866), 0);
- // Cause a 3 child split.
- check_store_range(mt, 1792, 1799, NULL, 0);
- mt_validate(mt);
- MT_BUG_ON(mt, !mt_height(mt));
- mtree_destroy(mt);
-
- mtree_init(mt, MAPLE_ALLOC_RANGE);
- for (i = 0; i <= 200; i++) {
- val = i*10;
- val2 = (i+1)*10;
- check_store_range(mt, val, val2, xa_mk_value(val), 0);
- mt_validate(mt);
- }
-
- for (i = 10; i <= 19; i++) {
- val = i*100 + 5;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- mt_validate(mt);
- val++;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- mt_validate(mt);
- val += 10;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- mt_validate(mt);
-
- val += 39;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- mt_validate(mt);
- val++;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- mt_validate(mt);
- val += 10;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- mt_validate(mt);
- }
-
- for (i = 13; i <= 14; i++) {
- val = i*100 + 75;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- val++;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- val += 9;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- val++;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- val += 9;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- val++;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- mt_validate(mt);
- }
-
- for (i = 0; i <= 3; i++) {
- val = 1200 + i*10;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- val++;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- mt_validate(mt);
- }
- for (i = 0; i <= 5; i++) {
- val = 1270 + i;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- val+= 10;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- mt_validate(mt);
- }
- for (i = 0; i <= 6; i++) {
- val = 1400 + i*10;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- val++;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- mt_validate(mt);
- }
- for (i = 0; i <= 5; i++) {
- val = 1370 + i;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- val+= 10;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- val+= 10;
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- mt_validate(mt);
- }
- for (i = 0; i < ARRAY_SIZE(overflow); i++) {
- val = overflow[i];
- check_store_range(mt, val, val, xa_mk_value(val), 0);
- mt_validate(mt);
- }
-
// Cause a 3 child split all the way up the tree.
- check_store_range(mt, 1349, 1350, NULL, 0);
- mt_validate(mt);
- mt_set_in_rcu(mt);
- MT_BUG_ON(mt, !mt_height(mt));
- mt_clear_in_rcu(mt);
- MT_BUG_ON(mt, !mt_height(mt));
- mtree_destroy(mt);
+ check_store_range(mt, 15519, 15519, NULL, 0);
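+ /* After the split the tree should be at least 4 levels deep. */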
+ BUG_ON(mt_height(mt) < 4);
}
static noinline void check_next_entry(struct maple_tree *mt)
check_new_node(&tree);
mtree_destroy(&tree);
+
mtree_init(&tree, 0);
check_dfs_preorder(&tree);
mtree_destroy(&tree);