}
}
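+/*
+ * mtree_range_walk() - Walk the tree from the root to the leaf containing
+ * mas->index.
+ * @mas: The maple state.
+ *
+ * On success, sets mas->index and mas->last to the range of the returned
+ * entry, and mas->min and mas->max to the range of the leaf node.
+ *
+ * Return: The entry at mas->index, or NULL if a dead node was encountered
+ * (in which case the state is reset to MAS_START).
+ */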
+static inline void *mtree_range_walk(struct ma_state *mas)
+{
+ unsigned long *pivots;
+ unsigned char offset;
+ struct maple_node *node;
+ struct maple_enode *next, *last;
+ enum maple_type type;
+ void __rcu **slots;
+ unsigned char end;
+ unsigned long max, min;
+ unsigned long prev_max, prev_min;
+
+ last = next = mas->node;
+ prev_min = min = 0;
+ max = ULONG_MAX;
+ do {
+ offset = 0;
+ last = next;
+ node = mte_to_node(next);
+ type = mte_node_type(next);
+ pivots = ma_pivots(node, type);
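+ /* The walk is under RCU; the node may be dead, so check before use. */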
+ if (unlikely(ma_dead_node(node)))
+ goto dead_node;
+
+ end = ma_data_end(node, type, pivots, max);
+ if (pivots[offset] >= mas->index) {
+ prev_max = max;
+ prev_min = min;
+ max = pivots[offset];
+ goto next;
+ }
+
+ do {
+ offset++;
+ } while ((offset < end) && (pivots[offset] < mas->index));
+
+ prev_min = min;
+ min = pivots[offset - 1] + 1;
+ prev_max = max;
+ if (likely((offset < end) && pivots[offset]))
+ max = pivots[offset];
+
+next:
+ slots = ma_slots(node, type);
+ next = mt_slot(mas->tree, slots, offset);
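+ /* The slot contents could be stale if the node has since died; re-check. */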
+ if (unlikely(ma_dead_node(node)))
+ goto dead_node;
+ } while (!ma_is_leaf(type));
+
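+ /* Leaf found: record it along with the entry and node ranges. */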
+ mas->offset = offset;
+ mas->index = min;
+ mas->last = max;
+ mas->min = prev_min;
+ mas->max = prev_max;
+ mas->node = last;
+ return (void *) next;
+
+dead_node:
+ mas_reset(mas);
+ return NULL;
+}
+
/*
* mtree_lookup_walk() - Internal quick lookup that does not keep maple state up
* to date.
static inline void mas_rewalk(struct ma_state *mas, unsigned long index)
{
- unsigned long range_max, range_min;
+retry:
mas_set(mas, index);
- mas_tree_walk(mas, &range_min, &range_max);
- mas->last = range_max;
- mas->index = range_min;
+ mas_start(mas);
+ if (mas_is_none(mas))
+ goto not_found;
+
+ if (mas_is_ptr(mas)) {
+ if (!mas->index)
+ return;
+
+ goto not_found;
+ }
+
+ mtree_range_walk(mas);
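+ /* mtree_range_walk() resets the state to MAS_START on a dead node. */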
+ if (mas_is_start(mas))
+ goto retry;
+
+ return;
+
+not_found:
+ mas->offset = MAPLE_NODE_SLOTS;
}
+
/*
* mas_dead_node() - Check if the maple state is pointing to a dead node.
* @mas: The maple state
*/
void *mt_find(struct maple_tree *mt, unsigned long *index, unsigned long max)
{
- unsigned long range_start = 0, range_end = 0;
- void *entry = NULL;
+ MA_STATE(mas, mt, *index, *index);
+ void *entry;
#ifdef CONFIG_DEBUG_MAPLE_TREE
unsigned long copy = *index;
#endif
- MA_STATE(mas, mt, *index, *index);
+ trace_ma_read(__func__, &mas);
if ((*index) > max)
return NULL;
rcu_read_lock();
- if (mas_tree_walk(&mas, &range_start, &range_end)) {
- if (unlikely(mas_is_ptr(&mas)) && !(*index))
- return mas_root(&mas);
- entry = mas_get_slot(&mas, mas.offset);
- }
+retry:
+ entry = mas_start(&mas);
+ if (unlikely(mas_is_none(&mas)))
+ goto unlock;
- mas.last = range_end;
- if (entry && !xa_is_zero(entry)) {
- rcu_read_unlock();
- goto done;
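+ /* A pointer at the root covers index 0 only. */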
+ if (unlikely(mas_is_ptr(&mas))) {
+ if (*index)
+ entry = NULL;
+
+ goto unlock;
}
- mas.index = range_start;
+ entry = mtree_range_walk(&mas);
+ if (xa_is_zero(entry))
+ entry = NULL;
+
+ if (entry)
+ goto unlock;
+
+ if (unlikely(mas_is_start(&mas)))
+ goto retry;
+
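+ /* Nothing at *index; search forward for the next entry up to max. */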
while (mas_searchable(&mas) && (mas.index < max)) {
entry = mas_next_entry(&mas, max);
if (likely(entry && !xa_is_zero(entry)))
break;
}
- rcu_read_unlock();
if (unlikely(xa_is_zero(entry)))
entry = NULL;
-done:
+unlock:
+ rcu_read_unlock();
+ if (unlikely(xa_is_zero(entry)))
+ entry = NULL;
+
if (likely(entry)) {
*index = mas.last + 1;
#ifdef CONFIG_DEBUG_MAPLE_TREE
MT_BUG_ON(mt, (*index) && ((*index) <= copy));
#endif
}
return entry;
}
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
/* #define BENCH_WALK */
+/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */
static
int mtree_insert_index(struct maple_tree *mt, unsigned long index, gfp_t gfp)
#if defined(BENCH_WALK)
static noinline void bench_walk(struct maple_tree *mt)
{
- int i, max = 2500, count = 500000000;
+ int i, max = 2500, count = 550000000;
MA_STATE(mas, mt, 1470, 1470);
for (i = 0; i < max; i += 10)
}
#endif
+#if defined(BENCH_MT_FOR_EACH)
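+/* Benchmark mt_for_each() across 500 stored ranges, one million iterations. */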
+static noinline void bench_mt_for_each(struct maple_tree *mt)
+{
+ int i, count = 1000000;
+ unsigned long max = 2500, index = 0;
+ void *entry;
+
+ for (i = 0; i < max; i += 5)
+ mtree_store_range(mt, i, i + 4, xa_mk_value(i), GFP_KERNEL);
+
+ for (i = 0; i < count; i++) {
+ unsigned long j = 0;
+
+ mt_for_each(mt, entry, index, max) {
+ MT_BUG_ON(mt, entry != xa_mk_value(j));
+ j += 5;
+ }
+
+ index = 0;
+ }
+}
+#endif
+
static noinline void check_forking(struct maple_tree *mt)
{
mtree_destroy(&tree);
goto skip;
#endif
+#if defined(BENCH_MT_FOR_EACH)
+#define BENCH
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+ bench_mt_for_each(&tree);
+ mtree_destroy(&tree);
+ goto skip;
+#endif
test_kmem_cache_bulk();