mas_ascend(&right);
mas_set_safe_pivot(&left, l_p_slot, piv);
- if (left.node == right.node)
+ if (left.node == right.node) {
+ if (r_p_slot - 1 != l_p_slot) {
+ int slot = l_p_slot + 1;
+ do {
+ mte_set_pivot(left.node, slot, piv);
+ } while (++slot < r_p_slot);
+ }
break;
+ }
mas_set_slot(&left, l_p_slot);
mas_set_slot(&right, r_p_slot);
unsigned char end;
unsigned char slot = mas_get_slot(mas);
void *entry = NULL;
+ void *next_entry = NULL;
// node that starts with NULL
MA_STATE(next, mas->tree, mas->index, mas->last);
end = _mas_data_end(&curr, mte_node_type(curr.node), &last_piv,
&coalesce);
entry = mas_get_rcu_slot(&curr, end);
- if (entry && !xa_is_deleted(entry))
+ if (entry && (!xa_is_deleted(entry)))
return;
- if (curr.node == mas->node)
+ if (curr.node == mas->node) // current wasn't shifted to prev.
mas_next(&next, ULONG_MAX);
if (mas_is_none(&next))
return;
- if (!mt_is_empty(mas_get_rcu_slot(&next, 0)))
+ next_entry = mas_get_rcu_slot(&next, 0);
+ if (next_entry && !mt_will_coalesce(next_entry))
return;
- while (end && (!entry || xa_is_deleted(entry)))
+ /* At this point curr has a gap at the end and next has a gap at
+ * the start; see if there are more entries to shift.
+ */
+
+ while (end && (!entry || mt_will_coalesce(entry)))
entry = mas_get_rcu_slot(&curr, --end);
+ // Clear slot 0 of next in case it held a deleted or retry entry.
+ if (next_entry)
+ mte_set_rcu_slot(next.node, 0, NULL);
last_piv = mas_get_safe_pivot(&curr, end);
-
- // At this point, mas->node is the one with end slot = NULL
- // check.node starts with NULL.
if (end < mt_pivot_count(curr.node) - 1)
mte_set_pivot(curr.node, end + 1, 0);
MT_BUG_ON(mt, mas.index != 79);
rcu_read_unlock();
+ // Test retry entry in the start of a gap.
+ mtree_test_store_range(mt, 78, 80, NULL);
+ mtree_test_erase(mt, 81);
+ mas_reset(&mas);
+ rcu_read_lock();
+ MT_BUG_ON(mt, mas_get_unmapped_area_rev(&mas, 76, 85, 4));
+ rcu_read_unlock();
+ MT_BUG_ON(mt, mas.index != 78);
+
mtree_destroy(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
check_seq(mt, 2000, false);