static bool mas_in_rcu(struct ma_state *mas)
{
- return mas->tree->ma_flags & MAPLE_USE_RCU;
+ return mas->tree->ma_flags & (1 << MAPLE_USE_RCU);
}
static void mas_set_height(struct ma_state *mas)
mte_set_rcu_slot(parent, slot, mas->node);
}
- if (!advanced) {
+ if (!advanced)
mte_free(prev);
- return;
- }
}
/*
/* Copy data from next level in the tree to mast->bn from next iteration */
memset(mast->bn, 0, sizeof(struct maple_big_node));
+ mast->bn->type = mte_node_type(left);
mast->orig_l->depth++;
if (mast_new_root(mast, mas))
mas_dup_state(mast->orig_l, &l_mas);
mas->depth = mast->orig_l->depth;
-
new_root:
mte_set_node_dead(mas->node);
// Set up mas for insertion.
mas_wmb_replace(mas, &free, &destroy);
return mast->bn->b_end;
}
+static inline int mas_cnt_positive(struct ma_state *mas)
+{
+ if (mas->full_cnt < 0)
+ return mas->full_cnt * -1;
+ return mas->full_cnt;
+}
/*
* mas_rebalance() - Rebalance a given node.
static inline int mas_rebalance(struct ma_state *mas,
struct maple_big_node *b_node)
{
- char empty_cnt = mas->full_cnt * -1;
+ char empty_cnt = mas_cnt_positive(mas);
struct maple_subtree_state mast;
unsigned char shift, b_end = ++b_node->b_end;
return mas_spanning_rebalance(mas, &mast, empty_cnt);
}
-static inline bool mas_split_final_node(struct maple_subtree_state *mast,
+static inline bool _mas_split_final_node(struct maple_subtree_state *mast,
struct ma_state *mas, int height)
{
struct maple_enode *ancestor;
unsigned int mas_height = mas_mt_height(mas);
- if (height <= mas->full_cnt)
- return false;
-
if (mte_is_root(mas->node)) {
if (mt_is_alloc(mas->tree))
mast->bn->type = maple_arange_64;
mte_set_parent(mast->r->node, ancestor, mas_offset(mast->r));
mte_to_node(ancestor)->parent = mas_mn(mas)->parent;
// New root requires a new height.
- if (mas->full_cnt >= mas_height) {
+ if (mas->full_cnt >= mas_height)
mas->depth = mas_height + 1;
- mas_set_height(mas);
- }
mast->l->node = ancestor;
mab_mas_cp(mast->bn, 0, mt_slots[mast->bn->type] - 1, mast->l);
return true;
}
+static inline bool mas_split_final_node(struct maple_subtree_state *mast,
+ struct ma_state *mas, int height)
+{
+ if (height <= mas->full_cnt)
+ return false;
+
+ return _mas_split_final_node(mast, mas, height);
+}
static inline void mast_split_fill_bnode(struct maple_subtree_state *mast,
struct ma_state *mas,
unsigned char skip)
}
}
-static inline bool mas_push_data(struct ma_state *mas,
+static inline bool mas_push_data(struct ma_state *mas, int height,
struct maple_subtree_state *mast, bool left)
{
unsigned char slot_total = mast->bn->b_end;
mast_split_data(mast, mas, split);
mast_split_fill_bnode(mast, mas, 2);
- mas_split_final_node(mast, mas, mas->full_cnt + 1);
+ _mas_split_final_node(mast, mas, height + 1);
return true;
}
-static inline bool mas_push_right(struct ma_state *mas,
+static inline bool mas_push_right(struct ma_state *mas, int height,
struct maple_subtree_state *mast)
{
- return mas_push_data(mas, mast, false);
+ return mas_push_data(mas, height, mast, false);
}
-static inline bool mas_push_left(struct ma_state *mas,
+static inline bool mas_push_left(struct ma_state *mas, int height,
struct maple_subtree_state *mast)
{
- return mas_push_data(mas, mast, true);
+ return mas_push_data(mas, height, mast, true);
}
static inline int mas_split(struct ma_state *mas,
mast.free = &mat;
mast.bn = b_node;
+ mas->depth = mas_mt_height(mas);
while (height++ <= mas->full_cnt) {
if (mas_split_final_node(&mast, mas, height))
break;
mas_dup_state(&l_mas, mas);
mas_dup_state(&r_mas, mas);
- l_mas.node = mas_new_ma_node(mas, b_node);
- r_mas.node = mas_new_ma_node(mas, b_node);
- if (mas_push_left(mas, &mast))
+ mast.l->node = mas_new_ma_node(mas, b_node);
+ mast.r->node = mas_new_ma_node(mas, b_node);
+ if (mas_push_left(mas, height, &mast))
break;
- if (mas_push_right(mas, &mast))
+ if (mas_push_right(mas, height, &mast))
break;
split = mab_calc_split(mast.bn, &mid_split);
mas_wmb_replace(mas, mast.free, NULL);
return 1;
}
-static inline int mas_cnt_positive(struct ma_state *mas)
-{
- if (mas->full_cnt < 0)
- return mas->full_cnt * -1;
- return mas->full_cnt;
-}
static inline bool mas_reuse_node(struct ma_state *mas,
struct maple_big_node *bn,
unsigned char end)
type = mte_node_type(mas->node);
mas->depth++;
-
end = mas_data_end(mas);
if (unlikely(!mas_node_walk(mas, type, range_min, range_max)))
return false;
else
mas->full_cnt = 0;
-
-
next = mas_get_rcu_slot(mas, mas_offset(mas));
-
// Traverse.
mas->max = *range_max;
mas->min = *range_min;
bool ret = false;
while (true) {
+ mas->depth++;
type = mte_node_type(mas->node);
if (unlikely(!mas_node_walk(mas, type, range_min, range_max)))
return false;
// Traverse.
- mas->depth++;
mas->max = *range_max;
mas->min = *range_min;
if (mas->full_cnt > 0)
node_cnt = mas->full_cnt; // For split upwards.
else
- node_cnt = -1 * mas->full_cnt; // For rebalance upwards.
+ node_cnt = mas_cnt_positive(mas); // For rebalance upwards.
/* Node rebalancing may occur due to this store, so there may be two new
* entries per level plus a new root.
*/
return 0;
mast.bn = &b_node;
+ b_node.type = mte_node_type(mas->node);
mast.orig_l = &l_mas;
mast.orig_r = &r_mas;
int ret = 0;
- if (mas_start(mas) || (mas_is_none(mas) || mas->node == MAS_ROOT))
+ if (mas_start(mas) || (mas_is_none(mas) || mas->node == MAS_ROOT)) {
ret = ma_root_ptr(mas, entry, content, overwrite);
-
- if (mas_is_err(mas))
- return NULL;
-
- if (ret > 2)
- return NULL;
- else if (ret)
- return content;
+ if (mas_is_err(mas))
+ return NULL;
+ if (ret)
+ goto complete_at_root;
+ }
if (!mas_wr_walk(mas, &r_min, &r_max, entry) && !mas->span_enode)
return NULL;
return NULL; // spanning writes always overwrite something.
}
ret = mas_spanning_store(mas, entry);
- goto exists;
+ goto spanning_store;
}
/* At this point, we are at the leaf node that needs to be altered. */
if (!mas_commit_b_node(mas, &b_node, end))
return NULL;
+complete_at_root:
if (ret > 2)
return NULL;
append:
+spanning_store:
exists:
return content;
}
return;
allocated:
+ mas->tree->ma_flags = oldmas->tree->ma_flags;
mas->node = mas_dup_node(oldmas, mas);
mte_to_node(mas->node)->parent = ma_parent_ptr(
check_index_load(mt, j);
check_load(mt, i - 1, NULL);
+ mt_set_in_rcu(mt);
+ MT_BUG_ON(mt, !mt_height(mt));
+ mt_clear_in_rcu(mt);
+ MT_BUG_ON(mt, !mt_height(mt));
i--;
}
check_load(mt, max + 1, NULL);
for (j = 0; j <= i; j++)
check_index_load(mt, j);
+ if(i)
+ MT_BUG_ON(mt, !mt_height(mt));
check_load(mt, i + 1, NULL);
}
if (verbose) {
)
#define check_erase2_debug 0
void *mas_next(struct ma_state *mas, unsigned long max);
+int mas_ce2_over_cnt(struct ma_state *mas_start, struct ma_state *mas_end,
+ void *s_entry, unsigned long s_min,
+ void *e_entry, unsigned long e_max,
+ unsigned long *set, int i, bool null_entry)
+{
+ unsigned char cnt = 0;
+ unsigned long retry = 0;
+ MA_STATE(null_mas, mas_start->tree, mas_start->index - 1,
+ mas_end->index - 1);
+
+ // check start
+ if (s_entry && (s_min == mas_start->index)) {
+ cnt++;
+ if (null_entry && !mas_load(&null_mas)) {
+ printk("null at %lu\n", null_mas.last);
+ cnt++;
+ }
+ }
+
+ // check end
+ if (e_entry && (e_max == mas_end->last)) {
+ cnt++;
+ if (null_entry) {
+ null_mas.index = mas_end->last + 1;
+ null_mas.last = mas_end->last + 1;
+ mas_reset(&null_mas);
+ if (!mas_load(&null_mas)) {
+ printk("null at %lu\n", null_mas.last);
+ cnt++;
+ }
+ }
+ }
+
+ // count slots
+ s_entry = mas_next(mas_start, set[i+2]);
+ while (!mas_is_none(mas_start) &&
+ (mas_start->last != e_max)) {
+ BUG_ON(retry > 50); // stop infinite retry on testing.
+ if (xa_is_zero(s_entry)) {
+ retry++;
+ continue;
+ }
+ if (s_entry)
+ cnt++;
+
+ s_entry = mas_next(mas_start, set[i+2]);
+ }
+
+ return cnt - 1;
+}
static noinline void check_erase2_testset(struct maple_tree *mt,
unsigned long *set, unsigned long size)
{
void *foo;
unsigned long addr = 0;
void *s_entry = NULL, *e_entry = NULL;
- unsigned long retry = 0;
MA_STATE(mas, mt, 0, 0);
entry_cnt--;
else if ((s_min != set[i+1]) && (s_max != set[i+2]))
entry_cnt++;
+ else if ((s_min == mas_start.index)
+ && (e_max == mas_end.last)) {
+ entry_cnt -=
+ mas_ce2_over_cnt(&mas_start, &mas_end,
+ s_entry, s_min,
+ e_entry, e_max, set, i,
+ true);
+ // Since NULLs are combined, the prev and next
+ // slots need to be checked for NULL.
+ }
+
erase_check_store_range(mt, set, i + 1, value);
break;
entry_cnt++;
}
} else {
- unsigned char cnt = 0;
- // check start
- if (s_entry && (s_min == mas_start.index))
- cnt++;
-
- // check end
- if (e_entry && (e_max == mas_end.last))
- cnt++;
- // count slots
- s_entry = mas_next(&mas_start,
- set[i+2]);
- while (!mas_is_none(&mas_start) &&
- (mas_start.last != e_max)) {
- BUG_ON(retry > 50); // stop infinite retry on testing.
- if (xa_is_zero(s_entry)) {
- retry++;
- continue;
- }
- if (s_entry)
- cnt++;
-
- s_entry = mas_next(&mas_start,
- set[i+2]);
- }
- entry_cnt = entry_cnt - cnt + 1;
+ entry_cnt -=
+ mas_ce2_over_cnt(&mas_start, &mas_end,
+ s_entry, s_min,
+ e_entry, e_max, set, i,
+ false);
}
erase_check_store_range(mt, set, i + 1, value);
break;
}
mt_validate(mt);
+ if (entry_cnt)
+ MT_BUG_ON(mt, !mt_height(mt));
#if check_erase2_debug > 1
mt_dump(mt);
#endif
check_insert_range(mt, r[0], r[1], xa_mk_value(r[0]), 0);
check_insert_range(mt, r[2], r[3], xa_mk_value(r[2]), 0);
check_insert_range(mt, r[4], r[5], xa_mk_value(r[4]), -EEXIST);
+ MT_BUG_ON(mt, !mt_height(mt));
// Store
check_store_range(mt, r[4], r[5], xa_mk_value(r[4]), 0);
check_store_range(mt, r[6], r[7], xa_mk_value(r[6]), 0);
check_store_range(mt, r[8], r[9], xa_mk_value(r[8]), 0);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
+ MT_BUG_ON(mt, mt_height(mt));
check_seq(mt, 50, false);
mt_set_non_kernel(4);
check_store_range(mt, 5, 47, xa_mk_value(47), 0);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
// Create tree of 1-100
// Store 45-168
mt_set_non_kernel(10);
check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
// Create tree of 1-200
check_seq(mt, 200, false);
// Store 45-168
check_store_range(mt, r[10], r[11], xa_mk_value(r[10]), 0);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
check_seq(mt, 30, false);
check_store_range(mt, 6, 18, xa_mk_value(6), 0);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
//rcu_barrier();
//return;
check_load(mt, 135, NULL);
check_load(mt, 140, NULL);
mt_set_non_kernel(0);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
check_load(mt, i, xa_mk_value(353));
check_load(mt, 362, xa_mk_value(362));
mt_set_non_kernel(0);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
mt_set_non_kernel(50);
check_load(mt, 364, NULL);
check_load(mt, 365, xa_mk_value(365));
mt_set_non_kernel(0);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
mt_set_non_kernel(5);
check_load(mt, i, xa_mk_value(352));
check_load(mt, 365, xa_mk_value(365));
mt_set_non_kernel(0);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
check_store_range(mt, 353, 361, xa_mk_value(353), 0);
mt_set_non_kernel(0);
mt_validate(mt);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
// Interesting cases:
// 1. Overwrite the end of a node and end in the first entry of the next
check_store_range(mt, 2435, 2460, xa_mk_value(2435), 0);
check_store_range(mt, 2461, 2470, xa_mk_value(2461), 0);
mt_set_non_kernel(0);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
// Test rebalance gaps
mtree_erase(mt, 220);
mtree_erase(mt, 230);
mt_set_non_kernel(0);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
}
check_store_range(mt, 4600, 4959, xa_mk_value(1), 0);
mt_validate(mt);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
check_store_range(mt, 4862, 4862, xa_mk_value(4862), 0);
check_store_range(mt, 4842, 4849, NULL, 0);
mt_validate(mt);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
// Cause a 3 child split.
check_store_range(mt, 1792, 1799, NULL, 0);
mt_validate(mt);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
// Cause a 3 child split all the way up the tree.
check_store_range(mt, 1349, 1350, NULL, 0);
mt_validate(mt);
+ mt_set_in_rcu(mt);
+ MT_BUG_ON(mt, !mt_height(mt));
+ mt_clear_in_rcu(mt);
+ MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
}