mte_set_pivot(left.node, slot, piv);
} while (++slot < r_p_slot);
}
- break;
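+ // Hypothetical note: once left and right resolve to the same node the
+ // pivots have already been fixed up above, so bail out entirely.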
+ return;
}
-
- mas_set_slot(&left, l_p_slot);
- mas_set_slot(&right, r_p_slot);
} while (left.node != right.node);
}
/* mas_get_prev_pivot() - Return the previous pivot.
unsigned long last_piv;
unsigned char coalesce;
unsigned char end;
+ unsigned char new_end;
unsigned char slot = mas_get_slot(mas);
void *entry = NULL;
void *next_entry = NULL;
* start, see if there are more to shift.
*/
- while (end && (!entry || mt_will_coalesce(entry)))
- entry = mas_get_rcu_slot(&curr, --end);
+ new_end = end;
+ while (new_end && (!entry || mt_will_coalesce(entry)))
+ entry = mas_get_rcu_slot(&curr, --new_end);
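+ // new_end now indexes the last slot holding a real entry; end is
+ // preserved so the skipped slots can be wiped below.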
// Clear slot 0 of the next node if it held a deleted or retry entry.
if (next_entry)
mte_set_rcu_slot(next.node, 0, NULL);
- last_piv = mas_get_safe_pivot(&curr, end);
- if (end < mt_pivot_count(curr.node) - 1)
- mte_set_pivot(curr.node, end + 1, 0);
+
+ last_piv = mas_get_safe_pivot(&curr, new_end);
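+ // Clear the pivots and slots between the new end and the old end so
+ // stale entries are not left behind.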
+ while (new_end++ != end) {
+ if (new_end < mt_pivot_count(curr.node))
+ mte_set_pivot(curr.node, new_end, 0);
+ // The location storing these values has moved.
+ // FIXME: We need to allocate a new node.
+ // FIXME: What if allocation fails?
+ mte_set_rcu_slot(curr.node, new_end, NULL);
+ }
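+ // Keep the search state's limits in sync with whichever node it
+ // currently points at.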
if (curr.node == mas->node)
mas->max = last_piv;
else
mas->min = last_piv + 1;
- mas_shift_pivot(&curr, &next, last_piv);
next.min = last_piv + 1;
curr.max = last_piv;
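+ // Set the new limits first; mas_shift_pivot() is assumed to consult
+ // the updated curr.max/next.min.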
+ mas_shift_pivot(&curr, &next, last_piv);
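+ // Gap bookkeeping only exists in allocation trees.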
if (mt_is_alloc(mas->tree)) {
mas_update_gap(&curr, false);
goto counted;
}
- for(i = 0; i < mt_slot_count(mte); i++) {
+ for (i = 0; i < mt_slot_count(mte); i++) {
p_end = mas_get_safe_pivot(mas, i);
if (!p_end && i)
p_end = mas->max;
mas_mn(mas), i,
mas_get_rcu_slot(mas, i), gap,
p_end, p_start);
+ mt_dump(mas->tree);
MT_BUG_ON(mas->tree,
gap != p_end - p_start + 1);
pr_err(MA_PTR"[%u] %lu >= %lu - %lu + 1 (%lu)\n",
mas_mn(mas), i, gap, p_end, p_start,
p_end - p_start + 1);
+ mt_dump(mas->tree);
MT_BUG_ON(mas->tree,
gap > p_end - p_start + 1);
}
not_empty:
p_start = p_end + 1;
if (p_end >= mas->max)
- break; }
+ break;
+ }
counted:
if (mte_is_root(mte))
return;

p_slot = mte_parent_slot(mas->node);
p_mn = mte_parent(mte);
MT_BUG_ON(mas->tree, max_gap > mas->max);
- if (ma_get_gap(p_mn, p_slot, mas_parent_enum(mas, mte)) != max_gap)
+ if (ma_get_gap(p_mn, p_slot, mas_parent_enum(mas, mte)) != max_gap) {
pr_err("gap "MA_PTR"[%u] != %lu\n", p_mn, p_slot, max_gap);
+ mt_dump(mas->tree);
+ }
MT_BUG_ON(mas->tree,
ma_get_gap(p_mn, p_slot, mas_parent_enum(mas, mte)) !=
max_gap);
if (piv < mas->min) {
pr_err(MA_PTR"[%u] %lu < %lu\n", mas_mn(mas), i,
piv, mas->min);
+ mt_dump(mas->tree);
MT_BUG_ON(mas->tree, piv < mas->min);
}
}
if (piv > mas->max) {
pr_err(MA_PTR"[%u] %lu > %lu\n", mas_mn(mas), i, piv,
mas->max);
+ mt_dump(mas->tree);
MT_BUG_ON(mas->tree, piv > mas->max);
}
prev_piv = piv;
*/
};
-
int cnt = 0;
MA_STATE(mas, mt, 0, 0);
rcu_read_lock();
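+ // Reverse search: the highest gap of length 2 in [76, 81] must
+ // start at index 79 here.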
MT_BUG_ON(mt, mas_get_unmapped_area_rev(&mas, 76, 81, 2));
MT_BUG_ON(mt, mas.index != 79);
+ mt_validate(mt);
rcu_read_unlock();
// Test retry entry in the start of a gap.
rcu_read_lock();
MT_BUG_ON(mt, mas_get_unmapped_area_rev(&mas, 76, 85, 4));
MT_BUG_ON(mt, mas.index != 78);
rcu_read_unlock();
+ mt_validate(mt);
mtree_destroy(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
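+ // Use an allocation-range tree so the gap tracking paths are
+ // exercised as well.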
rcu_read_lock();
MT_BUG_ON(mt, mas_get_unmapped_area_rev(&mas, 1700, 1800, 2));
MT_BUG_ON(mt, mas.index != 1791);
rcu_read_unlock();
+ mt_validate(mt);
mtree_destroy(mt);
}