From 78fc1c6ef8a0c6982e4c1878596d8180cdd1cb34 Mon Sep 17 00:00:00 2001
From: "Liam R. Howlett" <Liam.Howlett@Oracle.com>
Date: Tue, 3 Mar 2020 12:03:47 -0500
Subject: [PATCH] maple_tree: Combine gaps with retry entries.

When a retry entry is hit, combine it into the gap as it was relocated
to where the gap exists now.

Signed-off-by: Liam R. Howlett <Liam.Howlett@Oracle.com>
---
 lib/maple_tree.c      | 29 +++++++++++++++++++++--------
 lib/test_maple_tree.c |  9 +++++++++
 2 files changed, 30 insertions(+), 8 deletions(-)

diff --git a/lib/maple_tree.c b/lib/maple_tree.c
index c60d3c968372..faef5eb79bb3 100644
--- a/lib/maple_tree.c
+++ b/lib/maple_tree.c
@@ -927,8 +927,15 @@ static inline void mas_shift_pivot(struct ma_state *curr,
 		mas_ascend(&right);
 		mas_set_safe_pivot(&left, l_p_slot, piv);
-		if (left.node == right.node)
+		if (left.node == right.node) {
+			if (r_p_slot - 1 != l_p_slot) {
+				int slot = l_p_slot + 1;
+				do {
+					mte_set_pivot(left.node, slot, piv);
+				} while (++slot < r_p_slot);
+			}
 			break;
+		}
 
 		mas_set_slot(&left, l_p_slot);
 		mas_set_slot(&right, r_p_slot);
 
@@ -2203,6 +2210,7 @@ static inline void mas_may_move_gap(struct ma_state *mas)
 	unsigned char end;
 	unsigned char slot = mas_get_slot(mas);
 	void *entry = NULL;
+	void *next_entry = NULL; // node that starts with NULL
 
 	MA_STATE(next, mas->tree, mas->index, mas->last);
 
@@ -2227,25 +2235,30 @@ static inline void mas_may_move_gap(struct ma_state *mas)
 
 	end = _mas_data_end(&curr, mte_node_type(curr.node), &last_piv,
 			&coalesce);
 	entry = mas_get_rcu_slot(&curr, end);
-	if (entry && !xa_is_deleted(entry))
+	if (entry && (!xa_is_deleted(entry)))
 		return;
 
-	if (curr.node == mas->node)
+	if (curr.node == mas->node) // current wasn't shifted to prev.
 		mas_next(&next, ULONG_MAX);
 
 	if (mas_is_none(&next))
 		return;
 
-	if (!mt_is_empty(mas_get_rcu_slot(&next, 0)))
+	next_entry = mas_get_rcu_slot(&next, 0);
+	if (next_entry && !mt_will_coalesce(next_entry))
 		return;
 
-	while (end && (!entry || xa_is_deleted(entry)))
+	/* At this point curr has a gap at the end next has a gap at the
+	 * start, see if there are more to shift.
+	 */
+
+	while (end && (!entry || mt_will_coalesce(entry)))
 		entry = mas_get_rcu_slot(&curr, --end);
 
+	// Set the slot to 0 in case of deleted or retry.
+	if (next_entry)
+		mte_set_rcu_slot(next.node, 0, NULL);
 	last_piv = mas_get_safe_pivot(&curr, end);
-
-	// At this point, mas->node is the one with end slot = NULL
-	// check.node starts with NULL.
 	if (end < mt_pivot_count(curr.node) - 1)
 		mte_set_pivot(curr.node, end + 1, 0);
diff --git a/lib/test_maple_tree.c b/lib/test_maple_tree.c
index 3b4945c69f9c..ebab8f995c57 100644
--- a/lib/test_maple_tree.c
+++ b/lib/test_maple_tree.c
@@ -11401,6 +11401,15 @@ static noinline void check_gap_combining(struct maple_tree *mt)
 	MT_BUG_ON(mt, mas.index != 79);
 	rcu_read_unlock();
 
+	// Test retry entry in the start of a gap.
+	mtree_test_store_range(mt, 78, 80, NULL);
+	mtree_test_erase(mt, 81);
+	mas_reset(&mas);
+	rcu_read_lock();
+	MT_BUG_ON(mt, mas_get_unmapped_area_rev(&mas, 76, 85, 4));
+	rcu_read_unlock();
+	MT_BUG_ON(mt, mas.index != 78);
+
 	mtree_destroy(mt);
 	mtree_init(mt, MAPLE_ALLOC_RANGE);
 	check_seq(mt, 2000, false);
-- 
2.50.1