From f9259db6e30b1e88ff0a3c7bc2d107309d0eb9fa Mon Sep 17 00:00:00 2001 From: "Liam R. Howlett" Date: Mon, 14 Apr 2025 15:39:23 -0400 Subject: [PATCH] broken for now Signed-off-by: Liam R. Howlett --- lib/maple_tree.c | 185 ++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 160 insertions(+), 25 deletions(-) diff --git a/lib/maple_tree.c b/lib/maple_tree.c index 270d3a76554b..9a4307923f5a 100644 --- a/lib/maple_tree.c +++ b/lib/maple_tree.c @@ -3188,12 +3188,16 @@ void mni_node_part_init(struct ma_node_part *part, part->pivots[0] = left->max; part->gaps[0] = left->max_gap; - part->slots[1] = right->enode; - part->pivots[1] = right->max; - part->gaps[1] = right->max_gap; + if (right) { + part->slots[1] = right->enode; + part->pivots[1] = right->max; + part->gaps[1] = right->max_gap; + part->size = 2; + } else { + part->size = 1; + } part->pos = 0; - part->size = 2; part->unfinished = false; part->dst_max_off = 255; part->skip = 1; @@ -3958,8 +3962,8 @@ bool mas_wr_try_rebalance(struct ma_state *mas, struct ma_node_info *src, { struct ma_state tmp_mas; struct ma_node_info src2, parent, new_parent; - unsigned char split, max; bool left_store = false; + trace_ma_op(__func__, mas); /* * It is currently not known if the rebalance can work, so this is to @@ -3968,9 +3972,6 @@ bool mas_wr_try_rebalance(struct ma_state *mas, struct ma_node_info *src, tmp_mas = *mas; mas_wr_ascend_init(&tmp_mas, &parent); - max = mt_slots[src->type] - 1; - if (ma_is_leaf(src->type)) - max--; src->end = mas->end; if (!parent.insert_off) /* No left sibling */ @@ -3980,9 +3981,9 @@ bool mas_wr_try_rebalance(struct ma_state *mas, struct ma_node_info *src, tmp_mas.offset--; mas_descend(&tmp_mas); mni_mas_init(&src2, &tmp_mas); - split = mas_wr_rebalance_calc(src2.end + sd->new_end, src2.type); + sd->split = mas_wr_rebalance_calc(src2.end + sd->new_end, src2.type); - if (split) { + if (sd->split) { parent.insert_off--; /* Left will be src2, right will be src */ left->min = src2.min; @@ -3997,11 +3998,11 @@ try_right: mas_descend(&tmp_mas); mni_mas_init(&src2, &tmp_mas); mni_set_end(&src2); - split = mas_wr_rebalance_calc(src2.end + sd->new_end, src2.type); - if (!split) + sd->split = mas_wr_rebalance_calc(src2.end + sd->new_end, src2.type); + if (!sd->split) return false; - split = src2.end + sd->new_end - split; + sd->split = src2.end + sd->new_end - sd->split; left_store = true; /* Left will be src, right will be src2 */ left->min = src->min; @@ -4009,18 +4010,17 @@ try_right: } /* The rebalance operation will succeed. */ - sd->split = split; sd->new_end += src2.end + 1; if (left_store) { /* Left pushes data right. 
*/ sd->insert = mas->offset; - sd->space = split; + sd->space = sd->split; sd->offset = 0; } else { /* Right pushes data left */ sd->insert = mas->offset + src2.end + 1; sd->offset = src2.end + 1; - sd->space = split - src2.end; + sd->space = sd->split - src2.end; sd->states[sd->len].info = &src2; mns_mni_init(&sd->states[sd->len], left, 0, src2.end + 1); sd->len++; @@ -4052,18 +4052,155 @@ try_right: return true; } -#if 0 +#if 1 static void mas_wr_rebalance(struct ma_wr_state *wr_mas) { - struct ma_node_info src_info, parent, left, right; - struct ma_node_state src; - struct ma_node_part part; + struct ma_state *mas; + struct ma_state tmp_mas; + struct ma_node_info src2, parent, new_parent; + unsigned char max, height; + bool left_store = false; - mni_node_init(&left, mas_pop_node(mas), wr_mas->type); - if (mt_is_alloc(mas->tree)) - right.alloc = left.alloc = true; + mas = wr_mas->mas; + trace_ma_op(__func__, mas); + + /* + * This rebalance differs from try_rebalance() by knowing that the data + * has decreased in the node, so a rebalance will work with the sibling. + * Rebalancing will pull data from the right first, keeping the tree + * compact to the left side. + */ + mt_dump(mas->tree, mt_dump_dec); + + mas->depth = height; + + mni_mas_init(&src, mas); + tmp_mas = *mas; + while (height--) { + mas_wr_ascend_init(&tmp_mas, &parent); + max = mt_slots[src->type] - 1; + if (ma_is_leaf(src->type)) + max--; + + src->end = mas->end; + if (parent.insert_off != mas->end) { + /* Right sibling exists, rebalance against right */ + tmp_mas.offset = parent.insert_off + 1; + mas_descend(&tmp_mas); + mni_mas_init(&src2, &tmp_mas); + mni_set_end(&src2); + left_store = true; + sd->insert = mas->offset; + } else { + /* Rebalance against left sibling */ + tmp_mas.offset--; + mas_descend(&tmp_mas); + mni_mas_init(&src2, &tmp_mas); + mni_node_init(&right, mas_pop_node(mas), wr_mas->type); + parent.insert_off--; + sd->insert = mas->offset + src2.end + 1; + } + + sd->new_end += src2.end + 1; + mni_node_init(&left, mas_pop_node(mas), wr_mas->type); + if (sd->new_end > 2 * mt_min_slots[src.type]) { + printk("Rebalance\n"); + mni_node_init(&right, mas_pop_node(mas), wr_mas->type); + sd->split = mas_wr_rebalance_calc(sd->new_end, + src2.type); + /* + * Rebalance between nodes is possible, so the + * operation stops early. + */ + + /* Taken from mas_wr_try_rebalance */ + if (left_store) { + /* Left pushes data right. */ + sd->insert = mas->offset; + sd->space = sd->split; + sd->offset = 0; + } else { + /* Right pushes data left */ + sd->insert = mas->offset + src2.end + 1; + sd->offset = src2.end + 1; + sd->space = sd->split - src2.end; + sd->states[sd->len].info = &src2; + mns_mni_init(&sd->states[sd->len], left, 0, src2.end + 1); + sd->len++; + } + + /* + * There are two splits of src, at insert and insert_end. + * There can also be a split between nodes that may happen at these + * boundaries, or elsewhere. 
+ */ + mt_wr_split_data(src, left, right, part, sd); + if (left_store) { + sd->states[sd->len].info = &src2; + mns_mni_init(&sd->states[sd->len], right, 0, src2.end + 1); + sd->len++; + } + + mns_assemble(sd->states, sd->len); + mni_finalise(left); + mni_finalise(right); + mni_node_part_init(part, left, right); + part->skip = 2; + mas_ascend(mas); + mas->end = parent.end; + mas->offset = parent.insert_off; + mas_wr_converged(&parent, &new_parent, part, mas, sd); + src->enode = parent.enode; + mas->node = new_parent.enode; + /* FIXME: make another function */ + /* We can stop rebalancing here */ + goto replace_node; + } else { + /* + * Absorb all the data from two nodes, which may need + * more rebalancing + */ + sd->split = sd->new_end + 2; /* No split */ + + if (!left_store) { + sd->states[sd->len].info = &src2; + mns_mni_init(&sd->states[sd->len], left, 0, src2.end + 1); + sd->len++; + } + mt_wr_split_data(src, left, NULL, part, sd); + + if (left_store) { + sd->states[sd->len].info = &src2; + mns_mni_init(&sd->states[sd->len], left, 0, src2.end + 1); + sd->len++; + } + + mns_assemble(sd->states, sd->len); + mni_finalise(left); + if (ma_is_root(parent.node) && parent.end == 1) { + /* New root */ + } + if (parent.end > mt_min_slots[parent.type]) { + mni_node_part_init(part, left, NULL); + break; + } + } + } + + mas_ascend(mas); + mas->end = parent.end - 1; + mas->offset = parent.insert_off; + mas_wr_converged(&parent, &new_parent, part, mas, sd); + src->enode = parent.enode; + mas->node = new_parent.enode; +replace_node: + mas_wmb_replace(mas, src.enode); + mtree_range_walk(mas); + mt_dump(mas->tree, mt_dump_dec); } +#endif +#if 0 /* * There is insufficient data in the node after a store. * This rebalance will succeed, not like the split variant that will attempt @@ -4236,8 +4373,6 @@ static void mas_wr_split(struct ma_wr_state *wr_mas) struct split_data sd; int height; - - trace_ma_op(__func__, mas); height = mas_mt_height(mas); -- 2.49.0
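A minimal, standalone sketch of the decision mas_wr_rebalance() is making above: if the deficient node plus one sibling still carry more than 2 * mt_min_slots entries, redistribute them across two nodes; otherwise absorb everything into a single node and retry one level up (the "may need more rebalancing" branch). The names below (plan_rebalance, min_slots, combined) are hypothetical illustrations, not the kernel's API, and the split point is a simple midpoint rather than mas_wr_rebalance_calc():

#include <stdbool.h>
#include <stdio.h>

struct rebalance_plan {
	bool two_nodes;		/* true: redistribute, false: merge */
	unsigned char split;	/* last slot kept in the left node */
};

/* Decide between redistributing across two nodes or merging into one. */
static struct rebalance_plan plan_rebalance(unsigned char combined,
					    unsigned char min_slots)
{
	struct rebalance_plan plan;

	if (combined > 2 * min_slots) {
		/* Enough data for two legal nodes: split near the middle. */
		plan.two_nodes = true;
		plan.split = combined / 2;
	} else {
		/* Too little data: pull everything into a single node. */
		plan.two_nodes = false;
		plan.split = combined;
	}
	return plan;
}

int main(void)
{
	/* 9 entries left after the store, sibling holds 7, minimum is 5 */
	struct rebalance_plan a = plan_rebalance(9 + 7, 5);
	/* 6 entries left, sibling holds 4: not enough for two nodes */
	struct rebalance_plan b = plan_rebalance(6 + 4, 5);

	printf("a: %s, split %u\n",
	       a.two_nodes ? "two nodes" : "merge", (unsigned)a.split);
	printf("b: %s, split %u\n",
	       b.two_nodes ? "two nodes" : "merge", (unsigned)b.split);
	return 0;
}

The real function additionally records which sibling was used (left_store), where the incoming store lands (sd->insert), and how many slots stay in the left node (sd->space); the sketch only covers the threshold check and split choice.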