return mas->node == MAS_START;
}
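+/* mas_is_err() - Check if the maple state holds an xa_err encoded error. */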
+static inline bool mas_is_err(struct ma_state *mas)
+{
+ return xa_is_err(mas->node);
+}
static inline bool mas_searchable(struct ma_state *mas)
{
node = (void *)((unsigned long)node | 4);
}
-static inline bool mas_is_err(struct ma_state *mas)
-{
- return xa_is_err(mas->node);
-}
-
static inline unsigned int mte_slot_mask(const struct maple_enode *node)
{
unsigned int bitmask = 0x78; // Bits 3-6
ret++;
// Detect duplicate null entries and don't copy them.
- if (null_entry && slot > 0) {
- struct maple_enode *slot_val = mte_get_rcu_slot(mas->node,
- slot - 1);
- if (!slot_val) {
+ if (null_entry) {
+ struct maple_enode *slot_val;
+ slot_val = mte_get_rcu_slot(mas->node, slot + 1);
+ if (mt_will_coalesce(slot_val)) {
+ if (ret)
+ ret--;
+ }
+ if (slot > 0) {
+ slot_val = mte_get_rcu_slot(mas->node, slot - 1);
+ if (mt_is_empty(slot_val)) {
slot--;
if (ret)
ret--;
}
+ }
}
/* Check for splitting */
new_end += ret - coalesce;
break;
mn = mte_get_rcu_slot(mas->node, slot);
- if (!mn)
+ if (mt_is_empty(mn))
continue;
if (level == 1) {
if (slot != 0 && pivot == 0)
goto no_entry;
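+ /* The range begins beyond the search limit. */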
+ if (r_start > max)
+ goto no_entry;
+
entry = mte_get_rcu_slot(mas->node, slot);
if (!mt_is_empty(entry))
goto found;
}
no_entry:
+ *range_start = r_start;
return false;
found:
retry:
*range_start = mas->last + 1;
-
while (!mas_is_none(mas)) {
+
unsigned char p_slot = 0;
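+ /* A slot past this node's end means the node is exhausted. */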
+ slot = mas_get_slot(mas);
+ if (slot > mt_slot_count(mas->node))
+ goto next_node;
+
if (!mte_is_leaf(mas->node) || !mas_get_slot(mas)) {
*range_start = mas_first_entry(mas, limit);
if (mas_is_none(mas))
return NULL;
}
- if (mas_next_nentry(mas, limit, range_start)) {
+ if (mas_next_nentry(mas, limit, range_start))
break;
- }
+ if (*range_start > limit)
+ return NULL;
+
+next_node:
p_slot = mte_parent_slot(mas->node);
mas_set_slot(mas, p_slot);
mas_next_node(mas, limit);
* trees.
*
*/
-static inline void *mas_next(struct ma_state *mas, unsigned long max)
+void *mas_next(struct ma_state *mas, unsigned long max)
{
unsigned long index = 0;
mas_set_slot(mas, mt_slot_count(mas->node) - 1);
}
- if (mas->node == MAS_NONE)
+ if (mas_is_none(mas))
return NULL;
mas->last = max;
return 1;
}
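+/*
+ * mas_search_cont() - Check if the search should continue.
+ * Returns false once @index reaches @max, the state is no longer
+ * searchable or holds an error, or an @entry has been found.
+ */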
+static inline bool mas_search_cont(struct ma_state *mas, unsigned long index,
+ unsigned long max, void *entry)
+{
+ if (index >= max)
+ return false;
+
+ if (!mas_searchable(mas))
+ return false;
+
+ if (mas_is_err(mas))
+ return false;
+
+ if (entry)
+ return false;
+
+ return true;
+}
/**
* mas_find: If mas->node == MAS_START, find the first
* non-NULL entry >= mas->index.
*/
void *mas_find(struct ma_state *mas, unsigned long max)
{
- unsigned long index;
+ unsigned long index = mas->min;
void *entry = NULL;
- entry = _mas_next(mas, max, &index);
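+ /* Keep stepping until a non-empty entry is found or the search ends. */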
+ while (mas_search_cont(mas, index, max, entry)) {
+ entry = _mas_next(mas, max, &index);
+ if (mt_is_empty(entry))
+ entry = NULL;
+ }
if (entry)
mas->index = index;
void *mt_find(struct maple_tree *mt, unsigned long *index, unsigned long max,
bool start)
{
- unsigned long range_start = 0;
+ unsigned long range_start = 0, range_end = 0;
void *entry = NULL;
+ bool leaf;
+ unsigned char slot;
MA_STATE(mas, mt, *index, *index);
rcu_read_lock();
- if (!start)
- _mas_walk(&mas);
- if (start)
- entry = _mas_next(&mas, max, &range_start);
+ leaf = _mas_range_walk(&mas, &range_start, &range_end);
+ slot = mas_get_slot(&mas);
+ if (leaf && slot != MAPLE_NODE_SLOTS)
+ entry = mte_get_rcu_slot(mas.node, slot);
+
+ mas.last = range_end;
+
+ if (mt_is_empty(entry) || xa_is_zero(entry))
+ entry = NULL;
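+ /* Nothing yet; advance through the tree until an entry or the limit. */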
+ while (mas_search_cont(&mas, range_start, max, entry)) {
+ mas.last = range_start;
+ entry = _mas_next(&mas, max, &range_start);
+ if (mt_is_empty(entry) || xa_is_zero(entry))
+ entry = NULL;
+ }
+
if (entry)
*index = mas.last + 1;
+
rcu_read_unlock();
return entry;
}
EXPORT_SYMBOL(mtree_destroy);
-
#ifdef CONFIG_DEBUG_MAPLE_TREE
void mt_dump_node(void *entry, unsigned long min, unsigned long max,
unsigned int depth);
else if (entry)
mt_dump_node(entry, 0, mt_max[mte_node_type(entry)], 0);
}
-
+#ifndef __KERNEL__
extern void kmem_cache_set_non_kernel(struct kmem_cache *, unsigned int);
void mt_set_non_kernel(unsigned int val)
{
{
return kmem_cache_get_alloc(maple_node_cache);
}
-
+#endif
// Tree validations
/**
unsigned long gap = 0, max_gap = 0;
unsigned long p_end, p_start = mas->min;
unsigned char p_slot;
+ int i;
if (mte_is_dense(mte)) {
- for (int i = 0; i < mt_slot_count(mte); i++) {
+ for (i = 0; i < mt_slot_count(mte); i++) {
if (!mt_is_empty(mte_get_rcu_slot(mas->node, i))) {
if (gap > max_gap)
max_gap = gap;
goto counted;
}
- for (int i = 0; i < mt_slot_count(mte); i++) {
+ for (i = 0; i < mt_slot_count(mte); i++) {
p_end = mas_get_safe_pivot(mas, i);
if (!p_end && i != 0)
break;
/**
* Validate all pivots are within mas->min and mas->max.
*/
-void mas_validate_limits(struct ma_state *mas) {
+void mas_validate_limits(struct ma_state *mas)
+{
+ int i;
if (mte_is_root(mas->node))
return; // all limits are fine here.
- for (int i = 0; i < mt_slot_count(mas->node); i++) {
+ for (i = 0; i < mt_slot_count(mas->node); i++) {
unsigned long piv = mas_get_safe_pivot(mas, i);
if (!piv)
break;
val = 1;
}
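+
+ /* mt_find() from index 17: xa_mk_value(256) with max 512, NULL with max 20. */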
+ mas_reset(&mas);
+ index = 17;
+ entry = mt_find(mt, &index, 512, true);
+ MT_BUG_ON(mt, xa_mk_value(256) != entry);
+
+ mas_reset(&mas);
+ index = 17;
+ entry = mt_find(mt, &index, 20, true);
+ MT_BUG_ON(mt, entry != NULL);
+
// Range check..
// Insert ULONG_MAX
MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
13003, 13002, 13008, 13012, 13015,
14003, 14002, 14008, 14012, 14015,
15003, 15002, 15008, 15012, 15015,
- };
+ };
+
void *ptr = &set;
void *entry[2] = { ptr, mt };
void *root_node;
erase_check_insert(mt, 8);
erase_check_insert(mt, 9);
erase_check_erase(mt, 8);
}
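+/* Store ptr over the range spanning a[i] to a[i + 1]. */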
+#define erase_check_store_range(mt, a, i, ptr) mtree_test_store_range(mt, a[i],\
+ a[i + 1], ptr)
+static noinline void check_erase2_testset(struct maple_tree *mt)
+{
+#define STORE 1
+#define ERASE 2
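+ /* The set[] array lists (operation, range start, range end) triples. */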
+ unsigned long set[] = {
+STORE, 140737488347136, 140737488351231,
+STORE, 140721266458624, 140737488351231,
+ERASE, 140721266458624, 140737488351231,
+STORE, 140721266458624, 140721266462719,
+STORE, 94735788949504, 94735789121535,
+ERASE, 94735788949504, 94735789121535,
+STORE, 94735788949504, 94735788965887,
+STORE, 94735788965888, 94735789121535,
+ERASE, 94735788965888, 94735789121535,
+STORE, 94735788965888, 94735789068287,
+STORE, 94735789068288, 94735789109247,
+STORE, 94735789109248, 94735789121535,
+STORE, 140253902692352, 140253902864383,
+ERASE, 140253902692352, 140253902864383,
+STORE, 140253902692352, 140253902696447,
+STORE, 140253902696448, 140253902864383,
+ };
+ int entry_cnt = 0;
+ int check = 0;
+ int i;
+ void *foo;
+ unsigned long addr = 0;
+
+ mt_dump(mt);
+ for (i = 0; i < ARRAY_SIZE(set); i += 3) {
+ switch (set[i]) {
+ case STORE:
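+ /* Insert fails on overlap; fall back to storing over the range. */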
+ if (!mtree_test_insert_range(mt, set[i + 1],
+ set[i + 2], &set))
+ entry_cnt++;
+ else
+ erase_check_store_range(mt, set, i + 1, &set);
+ break;
+ case ERASE:
+ check_erase(mt, set[i + 1], &set);
+ entry_cnt--;
+ break;
+ }
+ mt_dump(mt);
+ }
+
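+ /* Walk the tree and verify the number of entries matches the count. */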
+ mt_for_each(mt, foo, addr, ULONG_MAX) {
+ check++;
+ }
+
+ MT_BUG_ON(mt, check != entry_cnt);
+}
static noinline void check_alloc_rev_range(struct maple_tree *mt)
{
/* Generated by:
mtree_init(&tree, 0);
check_erase_testset(&tree);
mtree_destroy(&tree);
+ mtree_init(&tree, 0);
+ check_erase2_testset(&tree);
+ mtree_destroy(&tree);
mtree_init(&tree, 0);
/*