- * @l_mas: The left maple state
- * @r_mas: The right maple state
+ * @l_wr_mas: The left maple write state
+ * @r_wr_mas: The right maple write state
*/
-static inline void mas_extend_spanning_null(
- struct ma_state *l_mas, unsigned long r_min, void *content,
- struct ma_wr_state *r_wr_mas)
-{
- unsigned char l_slot = l_mas->offset;
- struct maple_node *l_node = mas_mn(l_mas);
- enum maple_type l_type = mte_node_type(l_mas->node);
- void __rcu **slots = ma_slots(l_node, l_type);
+static inline void mas_extend_spanning_null(struct ma_wr_state *l_wr_mas,
+ struct ma_wr_state *r_wr_mas)
+{
struct ma_state *r_mas = r_wr_mas->mas;
-
-
- /* Expand NULL to start of the range. */
- if (!content)
- l_mas->index = r_min;
-
- if ((l_mas->index == r_min) &&
- (l_slot && !slots[l_slot - 1])) {
- if (l_slot > 1) {
- unsigned long *pivots = ma_pivots(l_node, l_type);
-
- l_mas->index = pivots[l_slot - 2] + 1;
- } else
+ struct ma_state *l_mas = l_wr_mas->mas;
+ unsigned char l_slot;
+
+ l_slot = l_mas->offset;
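+ /* Expand NULL to start of the range. */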
+ if (!l_wr_mas->content)
+ l_mas->index = l_wr_mas->r_min;
+
+ if ((l_mas->index == l_wr_mas->r_min) &&
+     (l_slot &&
+      !mas_slot_locked(l_mas, l_wr_mas->slots, l_slot - 1))) {
+ if (l_slot > 1)
+ l_mas->index = l_wr_mas->pivots[l_slot - 2] + 1;
+ else
l_mas->index = l_mas->min;
+
l_mas->offset = l_slot - 1;
}
*/
static inline int mas_spanning_store(struct ma_wr_state *wr_mas, void *entry)
{
- struct maple_big_node b_node;
+ struct ma_wr_state r_wr_mas, l_wr_mas;
struct maple_subtree_state mast;
+ struct maple_big_node b_node;
+ struct ma_state *mas;
unsigned char height;
- int node_count;
- void *l_content;
- unsigned long r_min;
- unsigned char l_end;
- struct ma_state *mas = wr_mas->mas;
- struct ma_wr_state r_wr_mas;
- /* Holds new left and right sub-tree */
- MA_STATE(l_mas, mas->tree, mas->index, mas->index);
- MA_STATE(r_mas, mas->tree, mas->index, mas->index);
+ mas = wr_mas->mas;
+ /* Left and Right side of spanning store */
+ MA_STATE(l_mas, NULL, 0, 0);
+ MA_STATE(r_mas, NULL, 0, 0);
trace_ma_op(__func__, mas);
* entries per level plus a new root.
*/
height = mas_mt_height(mas);
- node_count = 1 + height * 3;
- mas_node_count(mas, node_count);
+ mas_node_count(mas, 1 + height * 3);
if (mas_is_err(mas))
return 0;
- mast.bn = &b_node;
- b_node.type = mte_node_type(mas->node);
- mast.orig_l = &l_mas;
- mast.orig_r = &r_mas;
-
/* Set up right side. */
r_mas = *mas;
/* Avoid overflow. */
/* Set up left side. */
l_mas = *mas;
- l_content = mtree_range_walk(&l_mas);
- r_min = l_mas.index;
- l_mas.index = mas->index;
- l_mas.last = mas->last;
+ l_wr_mas.mas = &l_mas;
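+ /* Stop the left walk from spanning by searching for just the index. */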
+ l_mas.last = l_mas.index;
+ mas_wr_walk(&l_wr_mas);
if (!entry) {
- mas_extend_spanning_null(&l_mas, r_min, l_content, &r_wr_mas);
- mas->index = l_mas.index;
- mas->last = l_mas.last = r_mas.index = r_mas.last;
+ mas_extend_spanning_null(&l_wr_mas, &r_wr_mas);
mas->offset = l_mas.offset;
- }
+ mas->index = l_mas.index;
+ mas->last = l_mas.last = r_mas.last;
+ } else {
+ l_mas.last = mas->last;
+ }
+ b_node.type = wr_mas->type;
/* Copy l_mas and store the value in b_node. */
- l_end = mas_data_end(&l_mas);
- b_node.b_end = mas_store_b_node(&l_mas, &b_node, entry, l_end, l_end,
- l_content);
+ b_node.b_end = mas_store_b_node(&l_mas, &b_node, entry,
+ l_wr_mas.node_end, l_wr_mas.node_end,
+ l_wr_mas.content);
/* Copy r_mas into b_node. */
mas_mab_cp(&r_mas, r_mas.offset, r_wr_mas.node_end,
&b_node, b_node.b_end + 1);
/* Stop spanning searches by searching for just index. */
l_mas.index = l_mas.last = mas->index;
+ mast.bn = &b_node;
+ mast.orig_l = &l_mas;
+ mast.orig_r = &r_mas;
/* Combine l_mas and r_mas and split them up evenly again. */
return mas_spanning_rebalance(mas, &mast, height + 1);
}