mte_node_type(next))))
return; /* no metadata, could be node */
}
		fallthrough;
case maple_arange_64:
meta = ma_meta(mn, type);
break;
if (likely(mas->offset)) {
mas->offset--;
mas->last = mas->index - 1;
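+		/* Range now starts one past the previous pivot, or at mas->min */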
+ mas->index = mas_safe_min(mas, pivots, mas->offset);
} else {
		if (mas_prev_node(mas, min)) {
			mas_rewalk(mas, save_point);
			goto retry;
		}

		if (mas_is_none(mas))
			return NULL;

		mas->last = mas->max;
		node = mas_mn(mas);
		type = mte_node_type(mas->node);
		pivots = ma_pivots(node, type);
+		mas->index = pivots[mas->offset - 1] + 1;
	}
- mas->index = mas_safe_min(mas, pivots, mas->offset);
slots = ma_slots(node, type);
entry = mas_slot(mas, slots, mas->offset);
if (unlikely(mas_rewalk_if_dead(mas, node, save_point)))
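+ * @null: When true, a NULL entry is returned rather than skipped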
*
* Return: The entry in the next slot which is possibly NULL
*/
-static inline void *mas_next_slot(struct ma_state *mas, unsigned long max)
+static inline void *mas_next_slot(struct ma_state *mas, unsigned long max,
+ bool null)
{
void __rcu **slots;
unsigned long *pivots;
type = mte_node_type(mas->node);
pivots = ma_pivots(node, type);
data_end = ma_data_end(node, type, pivots, mas->max);
- pivot = mas_logical_pivot(mas, pivots, mas->offset, type);
if (unlikely(mas_rewalk_if_dead(mas, node, save_point)))
goto retry;
- if (pivot >= max)
- return NULL;
+again:
+ if (mas->max >= max) {
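+		/*
+		 * With a non-zero minimum, a pivot of 0 can only mean
+		 * "unset", so it maps straight to mas->max; the leftmost
+		 * node (mas->min == 0) may hold a real zero pivot and must
+		 * take the mas_logical_pivot() path.
+		 */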
+ if (likely(mas->min && mas->offset < data_end)) {
+ pivot = pivots[mas->offset];
+ if (!pivot)
+ pivot = mas->max;
+ } else {
+ pivot = mas_logical_pivot(mas, pivots, mas->offset,
+ type);
+ }
+
+ if (unlikely(mas_rewalk_if_dead(mas, node, save_point)))
+ goto retry;
+
+ if (pivot >= max)
+ return NULL;
+ }
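+	/* Use the next slot in this node, or the first slot of the next node */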
if (likely(data_end > mas->offset)) {
+ mas->index = pivots[mas->offset] + 1;
mas->offset++;
- mas->index = mas->last + 1;
+ if (likely(mas->offset < data_end)) {
+ mas->last = pivots[mas->offset];
+ if (!mas->last)
+ mas->last = mas->max;
+ } else {
+ mas->last = mas_logical_pivot(mas, pivots, mas->offset, type);
+ }
} else {
		if (mas_next_node(mas, node, max)) {
			mas_rewalk(mas, save_point);
			goto retry;
		}

		if (mas_is_none(mas))
			return NULL;

		mas->offset = 0;
		mas->index = mas->min;
		node = mas_mn(mas);
		type = mte_node_type(mas->node);
		pivots = ma_pivots(node, type);
+		mas->last = pivots[0];
	}
slots = ma_slots(node, type);
- mas->last = mas_logical_pivot(mas, pivots, mas->offset, type);
- entry = mas_slot(mas, slots, mas->offset);
+ entry = mt_slot(mas->tree, slots, mas->offset);
if (unlikely(mas_rewalk_if_dead(mas, node, save_point)))
goto retry;
+
+	if (entry)
+ return entry;
+
+ if (!null) {
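+		/*
+		 * Adjacent NULL ranges are never stored, so the slot after a
+		 * NULL must be occupied.  After a node change data_end is
+		 * stale, but any value above 1 lets the retry advance past
+		 * slot 0 within this node.
+		 */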
+ if (!mas->offset)
+ data_end = 2;
+ goto again;
+ }
+
return entry;
}
if (mas->last >= limit)
return NULL;
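+	/* NULL entries are skipped inside mas_next_slot() when @null is false */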
- entry = mas_next_slot(mas, limit);
- if (entry)
- return entry;
-
- if (mas_is_none(mas))
- return NULL;
-
- return mas_next_slot(mas, limit);
+ entry = mas_next_slot(mas, limit, false);
+ return entry;
}
/*
return entry;
-	/* Retries on dead nodes handled by mas_next_entry */
-	return mas_next_entry(mas, max);
+	/* Retries on dead nodes handled by mas_next_slot */
+	return mas_next_slot(mas, max, false);
}
EXPORT_SYMBOL_GPL(mas_next);
return entry;
/* Retries on dead nodes handled by mas_next_slot */
- return mas_next_slot(mas, max);
+ return mas_next_slot(mas, max, true);
}
EXPORT_SYMBOL_GPL(mas_next_range);
mas->index = mas->last;
mas->node = MAS_START;
- }
-
- if (unlikely(mas_is_paused(mas))) {
+ } else if (unlikely(mas_is_paused(mas))) {
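+		/* A paused state resumes one index past the last position */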
if (unlikely(mas->last >= max))
return true;
mas->node = MAS_START;
mas->index = ++mas->last;
- }
-
-
- if (unlikely(mas_is_ptr(mas)))
+ } else if (unlikely(mas_is_ptr(mas)))
goto ptr_out_of_range;
if (unlikely(mas_is_start(mas))) {
return entry;
-	/* Retries on dead nodes handled by mas_next_entry */
-	return mas_next_entry(mas, max);
+	/* Retries on dead nodes handled by mas_next_slot */
+	return mas_next_slot(mas, max, false);
}
EXPORT_SYMBOL_GPL(mas_find);
return entry;
	/* Retries on dead nodes handled by mas_next_slot */
-	return mas_next_slot(mas, max);
+	return mas_next_slot(mas, max, true);
}
EXPORT_SYMBOL_GPL(mas_find_range);
/* #define BENCH_WALK */
/* #define BENCH_MT_FOR_EACH */
/* #define BENCH_FORK */
+/* #define BENCH_MAS_FOR_EACH */
#ifdef __KERNEL__
#define mt_set_non_kernel(x) do {} while (0)
mt_set_non_kernel(1);
mtree_erase(mt, 34148798727); /* create a deleted range. */
- mtree_erase(mt, 34148798725); /* create a deleted range. */
+ mtree_erase(mt, 34148798725);
check_mtree_alloc_rrange(mt, 0, 34359052173, 210253414,
34148798725, 0, mt);
}
#endif
+#if defined(BENCH_MAS_FOR_EACH)
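+/*
+ * Store 500 small ranges, leaving a one-index gap every 30 indexes, then
+ * run one million mas_for_each() passes over them.
+ */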
+static noinline void __init bench_mas_for_each(struct maple_tree *mt)
+{
+ int i, count = 1000000;
+ unsigned long max = 2500;
+ void *entry;
+ MA_STATE(mas, mt, 0, 0);
+
+ for (i = 0; i < max; i += 5) {
+		int gap = 4;
+
+		if (i % 30 == 0)
+			gap = 3;
+ mtree_store_range(mt, i, i + gap, xa_mk_value(i), GFP_KERNEL);
+ }
+
+ rcu_read_lock();
+ for (i = 0; i < count; i++) {
+ unsigned long j = 0;
+
+ mas_for_each(&mas, entry, max) {
+ MT_BUG_ON(mt, entry != xa_mk_value(j));
+ j += 5;
+ }
+ mas_set(&mas, 0);
+ }
+ rcu_read_unlock();
+}
+#endif
+
/* check_forking - simulate the kernel forking sequence with the tree. */
static noinline void __init check_forking(struct maple_tree *mt)
{
mtree_destroy(&tree);
goto skip;
#endif
+#if defined(BENCH_MAS_FOR_EACH)
+#define BENCH
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+ bench_mas_for_each(&tree);
+ mtree_destroy(&tree);
+ goto skip;
+#endif
mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_iteration(&tree);