From 3101046ea6ddaa439d66d82a87bdf8258f78f5f3 Mon Sep 17 00:00:00 2001 From: "Liam R. Howlett" Date: Thu, 3 Dec 2020 21:55:28 -0500 Subject: [PATCH] maple_tree: Change mab_calc_split() to do bulk calc first. Do the bulk calc first if necessary. Signed-off-by: Liam R. Howlett --- lib/maple_tree.c | 26 +++++++++++++++++--------- lib/test_maple_tree.c | 9 +++------ 2 files changed, 20 insertions(+), 15 deletions(-) diff --git a/lib/maple_tree.c b/lib/maple_tree.c index 2206cd14e1f5..bd16d398921c 100644 --- a/lib/maple_tree.c +++ b/lib/maple_tree.c @@ -1487,21 +1487,29 @@ static inline int mab_calc_split(struct ma_state *mas, unsigned char *mid_split) { int split = b_node->b_end / 2; // Assume equal split. - unsigned char slot_count = mt_slots[b_node->type]; + unsigned char min, slot_count = mt_slots[b_node->type]; + + if (unlikely((mas->mas_flags & MA_STATE_BULK))) { + *mid_split = 0; + if (ma_is_leaf(b_node->type)) + min = 2; + else + return b_node->b_end - mt_min_slots[b_node->type]; - if (mab_middle_node(b_node, split, slot_count)) { + split = b_node->b_end - min; + mas->mas_flags |= MA_STATE_REBALANCE; + if (!b_node->slot[split]) + split--; + return split; + } + + if (unlikely(mab_middle_node(b_node, split, slot_count))) { split = b_node->b_end / 3; *mid_split = split * 2; } else { - unsigned char min = mt_min_slots[b_node->type] - 1; + min = mt_min_slots[b_node->type]; *mid_split = 0; - if ((mas->mas_flags & MA_STATE_BULK) && - ma_is_leaf(b_node->type)) { - min = 2; - split = mt_slots[b_node->type] - min; - mas->mas_flags |= MA_STATE_REBALANCE; - } /* Avoid having a range less than the slot count unless it * causes one node to be deficient. * NOTE: mt_min_slots is 1 based, b_end and split are zero. 
diff --git a/lib/test_maple_tree.c b/lib/test_maple_tree.c index b47e09a3065e..df4c15a78421 100644 --- a/lib/test_maple_tree.c +++ b/lib/test_maple_tree.c @@ -35051,7 +35051,7 @@ static noinline void check_gap_combining(struct maple_tree *mt) unsigned long seq100[] = { /* 0-5 */ - 78, 79, 80, + 74, 75, 76, 50, 100, 2, /* 6-12 */ @@ -35237,9 +35237,8 @@ static void check_dfs_preorder(struct maple_tree *mt) count++; mas_dfs_preorder(&mas); } while (!mas_is_none(&mas)); - // 68 + MAS_START = 69 + 1 for no jitter //printk("count %lu\n", count); - MT_BUG_ON(mt, count != 70); + MT_BUG_ON(mt, count != 74); mtree_destroy(mt); mtree_init(mt, MAPLE_ALLOC_RANGE); @@ -35251,8 +35250,7 @@ static void check_dfs_preorder(struct maple_tree *mt) mas_dfs_preorder(&mas); } while (!mas_is_none(&mas)); //printk("count %lu\n", count); - // 71 + MAS_START = 72 + 1 for no jitter - MT_BUG_ON(mt, count != 72); + MT_BUG_ON(mt, count != 77); mtree_destroy(mt); mtree_init(mt, MAPLE_ALLOC_RANGE); @@ -35263,7 +35261,6 @@ static void check_dfs_preorder(struct maple_tree *mt) count++; mas_dfs_preorder(&mas); } while (!mas_is_none(&mas)); - // 71 + MAS_START = 72 //printk("count %lu\n", count); MT_BUG_ON(mt, count != 77); mtree_destroy(mt); -- 2.50.1