#define MTREE_ALLOC_MAX 0x2000000000000UL
#define CONFIG_DEBUG_MAPLE_TREE
#define CONFIG_MAPLE_SEARCH
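+/*
+ * 32-bit nodes pack more slots because pointers are half the size,
+ * so a slot count above 31 indicates a 32-bit build.
+ */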
+#define MAPLE_32BIT (MAPLE_NODE_SLOTS > 31)
+
/* #define BENCH_SLOT_STORE */
/* #define BENCH_NODE_STORE */
/* #define BENCH_AWALK */
return mtree_erase(mt, index);
}
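+/* These tests use allocation ranges beyond 4GB, so they are 64-bit only. */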
+#if defined(CONFIG_64BIT)
static noinline void check_mtree_alloc_range(struct maple_tree *mt,
unsigned long start, unsigned long end, unsigned long size,
unsigned long expected, int eret, void *ptr)
MT_BUG_ON(mt, result != expected);
}
+#endif
static noinline void check_load(struct maple_tree *mt, unsigned long index,
void *ptr)
for (i = total; i > 0; i--) {
unsigned int e = 0; /* expected node_count */
- if (i >= 35)
- e = i - 35;
- else if (i >= 5)
- e = i - 5;
- else if (i >= 2)
- e = i - 2;
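+		/*
+		 * The expected node counts track node capacity, which
+		 * differs between the 32-bit and 64-bit node layouts.
+		 */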
+ if (!MAPLE_32BIT) {
+ if (i >= 35)
+ e = i - 35;
+ else if (i >= 5)
+ e = i - 5;
+ else if (i >= 2)
+ e = i - 2;
+ } else {
+ if (i >= 4)
+ e = i - 4;
+ else if (i == 3)
+ e = i - 2;
+ else
+ e = 0;
+ }
+
MT_BUG_ON(mt, mas.alloc->node_count != e);
mn = mas_pop_node(&mas);
MT_BUG_ON(mt, not_empty(mn));
static noinline void check_upper_bound_split(struct maple_tree *mt)
{
unsigned long i, j;
- unsigned long huge = 4000UL * 1000 * 1000;
+ unsigned long huge;
MT_BUG_ON(mt, !mtree_empty(mt));
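+	/* Keep the loop bound within LONG_MAX on a 32-bit unsigned long. */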
+ if (MAPLE_32BIT)
+ huge = 2147483647UL;
+ else
+ huge = 4000UL * 1000 * 1000;
+
i = 4096;
while (i < huge) {
check_insert(mt, i, (void *) i);
/* Insert 0. */
MT_BUG_ON(mt, mtree_insert_index(mt, val++, GFP_KERNEL));
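+	/* Entries are spaced by val <<= 2 below; fewer fit under a 32-bit ULONG_MAX. */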
+ if (MAPLE_32BIT)
+ count = 15;
+ else
+ count = 20;
+
for (int i = 0; i <= count; i++) {
if (val != 64)
MT_BUG_ON(mt, mtree_insert_index(mt, val, GFP_KERNEL));
index = 0;
MT_BUG_ON(mt, mtree_insert_index(mt, ULONG_MAX, GFP_KERNEL));
mt_for_each(mt, entry, index, ULONG_MAX) {
- if (val == 4398046511104)
+ if ((val == 4398046511104) || (val == ULONG_MAX))
MT_BUG_ON(mt, entry !=
xa_mk_value(ULONG_MAX & LONG_MAX));
else
MT_BUG_ON(mt, xa_mk_value(val) != entry);
- val <<= 2;
+
+		/* Workaround for 32-bit: clamp val at ULONG_MAX once the shift would wrap. */
+ if ((val << 2) < val)
+ val = ULONG_MAX;
+ else
+ val <<= 2;
+
if (val == 64) /* Skip zero entry. */
val <<= 2;
/* For zero check. */
mas_for_each(&mas, entry, ULONG_MAX) {
if (val == 64)
MT_BUG_ON(mt, entry != XA_ZERO_ENTRY);
- else if (val == 4398046511104)
+ else if ((val == 4398046511104) || (val == ULONG_MAX))
MT_BUG_ON(mt, entry != xa_mk_value(ULONG_MAX & LONG_MAX));
else
MT_BUG_ON(mt, xa_mk_value(val) != entry);
- val <<= 2;
+		/* Workaround for 32-bit: clamp val at ULONG_MAX once the shift would wrap. */
+ if ((val << 2) < val)
+ val = ULONG_MAX;
+ else
+ val <<= 2;
/* For zero check. */
if (!val)
return entry;
}
+#if defined(CONFIG_64BIT)
static noinline void check_erase2_testset(struct maple_tree *mt,
unsigned long *set, unsigned long size)
{
mtree_destroy(mt);
}
+#endif
static noinline void check_ranges(struct maple_tree *mt)
{
MT_BUG_ON(mt, mt_height(mt) >= 4);
for (i = 5; i < 45; i += 10)
check_store_range(mt, 11700 + i, 11700 + i + 1, NULL, 0);
- MT_BUG_ON(mt, mt_height(mt) < 4);
+ if (!MAPLE_32BIT)
+ MT_BUG_ON(mt, mt_height(mt) < 4);
mtree_destroy(mt);
MT_BUG_ON(mt, mt_height(mt) >= 4);
/* triple split across multiple levels. */
check_store_range(mt, 8184, 8184, xa_mk_value(8184), 0);
- MT_BUG_ON(mt, mt_height(mt) != 4);
+ if (!MAPLE_32BIT)
+ MT_BUG_ON(mt, mt_height(mt) != 4);
}
static noinline void check_next_entry(struct maple_tree *mt)
for (i = 0; i <= max; i++)
mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
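+	/* The mas_* calls below require the caller to hold the tree lock. */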
+ mas_lock(&mas);
/* Test expanding null at start. */
mas_walk(&mas);
data_end = mas_data_end(&mas);
mas_store_gfp(&mas, NULL, GFP_KERNEL);
MT_BUG_ON(mt, mtree_load(mt, 884) != NULL);
MT_BUG_ON(mt, mtree_load(mt, 889) != NULL);
+#if defined(CONFIG_64BIT)
MT_BUG_ON(mt, data_end != mas_data_end(&mas));
+#endif
/* Test expanding null at start and end. */
mas_set(&mas, 890);
MT_BUG_ON(mt, mtree_load(mt, 900) != NULL);
MT_BUG_ON(mt, mtree_load(mt, 905) != NULL);
MT_BUG_ON(mt, mtree_load(mt, 906) != NULL);
+#if defined(CONFIG_64BIT)
MT_BUG_ON(mt, data_end - 2 != mas_data_end(&mas));
+#endif
/* Test expanding null across multiple slots. */
mas_set(&mas, 800);
MT_BUG_ON(mt, mtree_load(mt, 810) != NULL);
MT_BUG_ON(mt, mtree_load(mt, 825) != NULL);
MT_BUG_ON(mt, mtree_load(mt, 826) != NULL);
+#if defined(CONFIG_64BIT)
MT_BUG_ON(mt, data_end - 4 != mas_data_end(&mas));
+#endif
+ mas_unlock(&mas);
}
static noinline void check_gap_combining(struct maple_tree *mt)
{
struct maple_enode *mn1, *mn2;
void *entry;
+ unsigned long singletons = 100;
- unsigned long seq100[] = {
+ unsigned long *seq100;
+ unsigned long seq100_64[] = {
	/* 0-5 */
	74, 75, 76,
	50, 100, 2,

	/* 6-12 */
	44, 45, 46, 43,
	20, 50, 3,

	/* 13-20 */
	80, 81, 82,
	76, 2, 79, 85, 4,
	};
+
+ unsigned long seq100_32[] = {
+ /* 0-5 */
+ 61, 62, 63,
+ 50, 100, 2,
+
+ /* 6-12 */
+ 31, 32, 33, 30,
+ 20, 50, 3,
+
+	/* 13-20 */
+ 80, 81, 82,
+ 76, 2, 79, 85, 4,
+ };
+
unsigned long seq2000[] = {
1152, 1151,
1100, 1200, 2,
286, 310,
};
- unsigned long index = seq100[0];
+ unsigned long index;
- MA_STATE(mas, mt, index, index);
+ MA_STATE(mas, mt, 0, 0);
+
+ if (MAPLE_32BIT)
+ seq100 = seq100_32;
+ else
+ seq100 = seq100_64;
+ index = seq100[0];
+ mas_set(&mas, index);
MT_BUG_ON(mt, !mtree_empty(mt));
- check_seq(mt, 100, false); /* create 100 singletons. */
+ check_seq(mt, singletons, false); /* create 100 singletons. */
mt_set_non_kernel(1);
mtree_test_erase(mt, seq100[2]);
static void check_dfs_preorder(struct maple_tree *mt)
{
- unsigned long count = 0, max = 1000;
+ unsigned long e, count = 0, max = 1000;
MA_STATE(mas, mt, 0, 0);
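+	/* Wider 32-bit nodes hold more entries, so the preorder walk sees fewer nodes. */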
+ if (MAPLE_32BIT)
+ e = 37;
+ else
+ e = 74;
+
check_seq(mt, max, false);
do {
count++;
mas_dfs_preorder(&mas);
} while (!mas_is_none(&mas));
- MT_BUG_ON(mt, count != 74);
+ MT_BUG_ON(mt, count != e);
mtree_destroy(mt);
mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
mas_reset(&mas);
count = 0;
+ if (!MAPLE_32BIT)
+ e = 77;
+
check_seq(mt, max, false);
do {
count++;
mas_dfs_preorder(&mas);
} while (!mas_is_none(&mas));
/*printk("count %lu\n", count); */
- MT_BUG_ON(mt, count != 77);
+ MT_BUG_ON(mt, count != e);
mtree_destroy(mt);
mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
mas_dfs_preorder(&mas);
} while (!mas_is_none(&mas));
/*printk("count %lu\n", count); */
- MT_BUG_ON(mt, count != 77);
+ MT_BUG_ON(mt, count != e);
mtree_destroy(mt);
mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
static noinline void next_prev_test(struct maple_tree *mt)
{
- int i, nr_entries = 200;
+ int i, nr_entries;
void *val;
MA_STATE(mas, mt, 0, 0);
struct maple_enode *mn;
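+	/* level2[] = {walk start, next() limit, index/last of the next two ranges} */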
+ unsigned long *level2;
+ unsigned long level2_64[] = {707, 1000, 710, 715, 720, 725};
+ unsigned long level2_32[] = {1747, 2000, 1750, 1755, 1760, 1765};
+
+ if (MAPLE_32BIT) {
+ nr_entries = 500;
+ level2 = level2_32;
+ } else {
+ nr_entries = 200;
+ level2 = level2_64;
+ }
for (i = 0; i <= nr_entries; i++)
mtree_store_range(mt, i*10, i*10 + 5,
/* Check across two levels of the tree */
mas_reset(&mas);
- mas_set(&mas, 707);
+ mas_set(&mas, level2[0]);
val = mas_walk(&mas);
MT_BUG_ON(mt, val != NULL);
- val = mas_next(&mas, 1000);
- MT_BUG_ON(mt, val != xa_mk_value(710 / 10));
- MT_BUG_ON(mt, mas.index != 710);
- MT_BUG_ON(mt, mas.last != 715);
+ val = mas_next(&mas, level2[1]);
+ MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
+ MT_BUG_ON(mt, mas.index != level2[2]);
+ MT_BUG_ON(mt, mas.last != level2[3]);
mn = mas.node;
- val = mas_next(&mas, 1000);
- MT_BUG_ON(mt, val != xa_mk_value(720 / 10));
- MT_BUG_ON(mt, mas.index != 720);
- MT_BUG_ON(mt, mas.last != 725);
+ val = mas_next(&mas, level2[1]);
+ MT_BUG_ON(mt, val != xa_mk_value(level2[4] / 10));
+ MT_BUG_ON(mt, mas.index != level2[4]);
+ MT_BUG_ON(mt, mas.last != level2[5]);
MT_BUG_ON(mt, mn == mas.node);
val = mas_prev(&mas, 0);
- MT_BUG_ON(mt, val != xa_mk_value(710 / 10));
- MT_BUG_ON(mt, mas.index != 710);
- MT_BUG_ON(mt, mas.last != 715);
+	MT_BUG_ON(mt, val != xa_mk_value(level2[2] / 10));
+ MT_BUG_ON(mt, mas.index != level2[2]);
+ MT_BUG_ON(mt, mas.last != level2[3]);
/* Check running off the end and back on */
mas_reset(&mas);
- mas_set(&mas, 2000);
+ mas_set(&mas, nr_entries * 10);
val = mas_walk(&mas);
- MT_BUG_ON(mt, val != xa_mk_value(2000 / 10));
- MT_BUG_ON(mt, mas.index != 2000);
- MT_BUG_ON(mt, mas.last != 2005);
+ MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
+ MT_BUG_ON(mt, mas.index != (nr_entries * 10));
+ MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
val = mas_next(&mas, ULONG_MAX);
MT_BUG_ON(mt, val != NULL);
MT_BUG_ON(mt, mas.last != ULONG_MAX);
val = mas_prev(&mas, 0);
- MT_BUG_ON(mt, val != xa_mk_value(2000 / 10));
- MT_BUG_ON(mt, mas.index != 2000);
- MT_BUG_ON(mt, mas.last != 2005);
+ MT_BUG_ON(mt, val != xa_mk_value(nr_entries));
+ MT_BUG_ON(mt, mas.index != (nr_entries * 10));
+ MT_BUG_ON(mt, mas.last != (nr_entries * 10 + 5));
/* Check running off the start and back on */
mas_reset(&mas);
* Also discovered issue with metadata setting.
*/
mt_init_flags(mt, 0);
- mtree_test_store_range(mt, 0, 18446744073709551615UL, (void *)0x1);
+ mtree_test_store_range(mt, 0, ULONG_MAX, (void *)0x1);
mtree_test_store(mt, 4, (void *)0x9);
mtree_test_erase(mt, 5);
mtree_test_erase(mt, 0);
mtree_test_insert(mt, 8, (void *)0x11);
mtree_test_insert(mt, 4, (void *)0x9);
mtree_test_insert(mt, 2480, (void *)0x1361);
- mtree_test_insert(mt, 18446744073709551615UL,
+ mtree_test_insert(mt, ULONG_MAX,
(void *)0xffffffffffffffff);
- mtree_test_erase(mt, 18446744073709551615UL);
+ mtree_test_erase(mt, ULONG_MAX);
mtree_destroy(mt);
/*
mtree_test_insert(mt, 8, (void *)0x11);
mtree_test_insert(mt, 21, (void *)0x2b);
mtree_test_insert(mt, 2, (void *)0x5);
- mtree_test_insert(mt, 18446744073709551605UL, (void *)0xffffffffffffffeb);
- mtree_test_erase(mt, 18446744073709551605UL);
+ mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
+ mtree_test_erase(mt, ULONG_MAX - 10);
mtree_test_store_range(mt, 0, 281, (void *)0x1);
mtree_test_erase(mt, 2);
mtree_test_insert(mt, 1211, (void *)0x977);
mtree_test_insert(mt, 8, (void *)0x11);
mtree_test_insert(mt, 21, (void *)0x2b);
mtree_test_insert(mt, 2, (void *)0x5);
- mtree_test_insert(mt, 18446744073709551605UL, (void *)0xffffffffffffffeb);
- mtree_test_erase(mt, 18446744073709551605UL);
+ mtree_test_insert(mt, ULONG_MAX - 10, (void *)0xffffffffffffffeb);
+ mtree_test_erase(mt, ULONG_MAX - 10);
}
static noinline void check_dup_gaps(struct maple_tree *mt,
unsigned long nr_entries, bool zero_start,
check_ranges(&tree);
mtree_destroy(&tree);
- mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
- check_alloc_range(&tree);
- mtree_destroy(&tree);
-
- mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
- check_alloc_rev_range(&tree);
- mtree_destroy(&tree);
-
mt_init_flags(&tree, 0);
check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */
check_prev_entry(&tree);
mtree_destroy(&tree);
- mt_init_flags(&tree, 0);
- check_erase2_sets(&tree);
- mtree_destroy(&tree);
-
mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_gap_combining(&tree);
mtree_destroy(&tree);
next_prev_test(&tree);
mtree_destroy(&tree);
+#if defined(CONFIG_64BIT)
+ /* These tests have ranges outside of 4GB */
mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
- check_rcu_simulated(&tree);
+ check_alloc_range(&tree);
mtree_destroy(&tree);
mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
- check_rcu_threaded(&tree);
+ check_alloc_rev_range(&tree);
+ mtree_destroy(&tree);
+
+ mt_init_flags(&tree, 0);
+ check_erase2_sets(&tree);
mtree_destroy(&tree);
+#endif
+
+ if (!MAPLE_32BIT) {
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+ check_rcu_simulated(&tree);
+ mtree_destroy(&tree);
+
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
+ check_rcu_threaded(&tree);
+ mtree_destroy(&tree);
+ }
mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_spanning_relatives(&tree);
atomic_read(&maple_tree_tests_passed),
atomic_read(&maple_tree_tests_run));
if (atomic_read(&maple_tree_tests_run) ==
- atomic_read(&maple_tree_tests_passed))
+ atomic_read(&maple_tree_tests_passed))
return 0;
return -EINVAL;