*/
if (unlikely((mas->mas_flags & MA_STATE_BULK))) {
*mid_split = 0;
- split = b_end - mt_min_slots[bn->type];
+ split = b_end - mt_min_slots[bn->type] - 1;
if (!ma_is_leaf(bn->type))
return split;
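
For orientation: this first hunk sits in mab_calc_split(), where bulk mode (MA_STATE_BULK) skews the split so the left node keeps as much data as possible. As I read it, split is the offset of the last entry kept on the left, so the right sibling receives b_end - split entries: exactly mt_min_slots[bn->type] before the change, mt_min_slots[bn->type] + 1 after it. A standalone sketch of that arithmetic (the concrete numbers are invented; only the offset interpretation is taken from the code above):

    #include <stdio.h>

    int main(void)
    {
        int b_end = 15; /* hypothetical big-node end offset */
        int min = 5;    /* stand-in for mt_min_slots[bn->type] */

        int old_split = b_end - min;     /* right sibling gets min entries */
        int new_split = b_end - min - 1; /* right sibling gets min + 1 */

        printf("old: left=%d right=%d\n", old_split + 1, b_end - old_split);
        printf("new: left=%d right=%d\n", new_split + 1, b_end - new_split);
        return 0;
    }
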
tmp = mas_data_end(&l_mas) - split;
memcpy(slots, l_slots + split + 1, sizeof(void *) * tmp);
- memcpy(pivs, l_pivs + split + 1, sizeof(unsigned long) * tmp);
- pivs[tmp] = l_mas.max;
+ memcpy(pivs, l_pivs + split + 1, sizeof(unsigned long) * (tmp - 1));
+ pivs[tmp - 1] = l_mas.max;
memcpy(slots + tmp, ma_slots(node, mt), sizeof(void *) * end);
memcpy(pivs + tmp, ma_pivots(node, mt), sizeof(unsigned long) * end);
+
l_mas.max = l_pivs[split];
mas->min = l_mas.max + 1;
old_eparent = mt_mk_node(mte_parent(l_mas.node),
                         mas_parent_type(&l_mas, l_mas.node));
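
This hunk is in mas_destroy_rebalance(). As far as I can tell, the pivot of a node's last in-use slot can be implied by the node's max rather than stored, so l_pivs[mas_data_end(&l_mas)] may hold 0; copying tmp pivots wholesale therefore imports that bogus end pivot into pivs[tmp - 1], while the old pivs[tmp] = l_mas.max store was immediately clobbered by the later memcpy(pivs + tmp, ...). The fix copies one pivot fewer and writes the left node's max explicitly. A toy model of just that copy (array sizes and values are made up):

    #include <stdio.h>
    #include <string.h>

    int main(void)
    {
        /* Left node: pivots 10,20,30,40, last pivot implied (stored 0). */
        unsigned long l_pivs[8] = { 10, 20, 30, 40, 0, 0, 0, 0 };
        unsigned long l_max = 50;      /* plays the role of l_mas.max */
        unsigned long pivs[8] = { 0 };
        int split = 1, data_end = 4;   /* slots 2..4 migrate */
        int tmp = data_end - split;    /* 3 slots move over */

        /* old: copies the implied 0 at l_pivs[data_end] into pivs[tmp - 1] */
        memcpy(pivs, l_pivs + split + 1, sizeof(unsigned long) * tmp);
        printf("old last pivot: %lu\n", pivs[tmp - 1]);  /* 0: wrong */

        /* new: copy one fewer pivot and materialize the implied one */
        memcpy(pivs, l_pivs + split + 1, sizeof(unsigned long) * (tmp - 1));
        pivs[tmp - 1] = l_max;
        printf("new last pivot: %lu\n", pivs[tmp - 1]);  /* 50 */
        return 0;
    }
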
mas_start(mas);
mtree_range_walk(mas);
end = mas->end + 1;
- if (end < mt_min_slot_count(mas->node) - 1)
+ if (mas->end < mt_min_slot_count(mas->node))
mas_destroy_rebalance(mas, end);
mas->mas_flags &= ~MA_STATE_REBALANCE;
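
And this hunk is in mas_destroy(), on the MA_STATE_REBALANCE cleanup path. With end = mas->end + 1 as the entry count, the old test end < mt_min_slot_count(mas->node) - 1 only fired for nodes holding at most mt_min_slot_count - 2 entries, so a node left with exactly mt_min_slot_count - 1 entries (already under-filled) was never rebalanced; the new test mas->end < mt_min_slot_count(mas->node) fires for any count up to and including mt_min_slot_count. A minimal table of the two predicates (min = 6 is an arbitrary choice):

    #include <stdio.h>

    int main(void)
    {
        int min = 6; /* stand-in for mt_min_slot_count(mas->node) */

        for (int mas_end = 0; mas_end <= 7; mas_end++) {
            int end = mas_end + 1;        /* entry count */
            int old_cond = end < min - 1; /* old trigger */
            int new_cond = mas_end < min; /* new trigger */

            printf("entries=%d old=%d new=%d\n", end, old_cond, new_cond);
        }
        return 0;
    }

Note the divergence at 5 and 6 entries: those are the under-filled fills the old condition let through.
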
*/
static inline void check_bulk_rebalance(struct maple_tree *mt)
{
- MA_STATE(mas, mt, ULONG_MAX, ULONG_MAX);
+ MA_STATE(mas, mt, 0, 0);
int max = 10;
+ int i = 0;
- build_full_tree(mt, 0, 2);
-
- /* erase every entry in the tree */
- do {
+ while (mt_height(mt) < 3) {
/* set up bulk store mode */
mas_expected_entries(&mas, max);
- mas_erase(&mas);
+ mas_set_range(&mas, i, i + 9);
+ mas_store_gfp(&mas, xa_mk_value(0xA), GFP_KERNEL);
MT_BUG_ON(mt, mas.store_type == wr_rebalance);
- } while (mas_prev(&mas, 0) != NULL);
+ mas_destroy(&mas);
+ mt_validate(mt);
+ i += 10;
+ }
mas_destroy(&mas);
}
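
The reworked test drives genuine bulk stores instead of walking the tree backwards with mas_erase(): each pass reserves nodes via mas_expected_entries(), stores one entry over a ten-index range, asserts the write was never classified as wr_rebalance while MA_STATE_BULK is set, then leaves bulk mode through mas_destroy() and re-checks the tree with mt_validate(), repeating until the tree reaches height 3. If it is wired up like the other cases in lib/test_maple_tree.c, the call site would look roughly like this (the placement in maple_tree_seed() is my assumption):

    mt_init_flags(&tree, 0);
    check_bulk_rebalance(&tree);
    mtree_destroy(&tree);
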