mt_set_non_kernel(0);
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set2, ARRAY_SIZE(set2));
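/*
 * mt_find() walks forward from *start and returns the first entry at or
 * below the given max, so the !! assertion below verifies that the erase
 * sequence left this range empty. The magic values appear to be
 * addresses captured from a real VMA layout.
 */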
start = 140735933894656;
MT_BUG_ON(mt, !!mt_find(mt, &start, 140735933906943UL));
mtree_destroy(mt);
mt_set_non_kernel(2);
- mtree_init(mt, 0);
+ mt_init_flags(mt, 0);
check_erase2_testset(mt, set3, ARRAY_SIZE(set3));
mt_set_non_kernel(0);
mtree_destroy(mt);
- mtree_init(mt, 0);
+ mt_init_flags(mt, 0);
check_erase2_testset(mt, set4, ARRAY_SIZE(set4));
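/*
 * Walk the surviving entries under RCU: mas_for_each() resumes the
 * maple state and visits every remaining entry up to the given max
 * (ULONG_MAX here, i.e. the whole tree).
 */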
rcu_read_lock();
mas_for_each(&mas, entry, ULONG_MAX) {
rcu_barrier();
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
mt_set_non_kernel(100);
check_erase2_testset(mt, set5, ARRAY_SIZE(set5));
rcu_barrier();
mt_set_non_kernel(0);
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set6, ARRAY_SIZE(set6));
rcu_barrier();
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set7, ARRAY_SIZE(set7));
rcu_barrier();
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set8, ARRAY_SIZE(set8));
rcu_barrier();
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set9, ARRAY_SIZE(set9));
rcu_barrier();
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set10, ARRAY_SIZE(set10));
rcu_barrier();
mtree_destroy(mt);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set11, ARRAY_SIZE(set11));
rcu_barrier();
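/*
 * mas_empty_area_rev() searches downward from the upper limit for a
 * gap of at least the requested size; on success the maple state
 * (mas.index/mas.last) is left describing the free range it found.
 */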
mas_empty_area_rev(&mas, 12288, 140014592737280, 0x2000);
mas.tree = mt;
count = 0;
mas.index = 0;
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set12, ARRAY_SIZE(set12));
rcu_barrier();
mas_for_each(&mas, entry, ULONG_MAX) {
mtree_destroy(mt);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set13, ARRAY_SIZE(set13));
mtree_erase(mt, 140373516443648);
rcu_read_lock();
mas_empty_area_rev(&mas, 0, 140373518663680, 4096);
rcu_read_unlock();
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set14, ARRAY_SIZE(set14));
rcu_barrier();
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set15, ARRAY_SIZE(set15));
rcu_barrier();
mtree_destroy(mt);
/* set16 was added to find a bug in limit updating at slot 0. */
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set16, ARRAY_SIZE(set16));
rcu_barrier();
mas_empty_area_rev(&mas, 4096, 139921865637888, 0x6000);
*/
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set17, ARRAY_SIZE(set17));
rcu_barrier();
mas_empty_area_rev(&mas, 4096, 139953197334528, 0x1000);
*/
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set18, ARRAY_SIZE(set18));
rcu_barrier();
mas_empty_area_rev(&mas, 4096, 140222972858368, 2215936);
*/
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set19, ARRAY_SIZE(set19));
rcu_barrier();
mas.index = 140656779083776;
* overwritten during the __mas_add operation and setting it to zero.
*/
mt_set_non_kernel(99);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set20, ARRAY_SIZE(set20));
rcu_barrier();
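/*
 * check_load() asserts that a lookup at the given index returns the
 * expected value; expecting NULL here means the erase sequence must
 * have removed the entry.
 */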
check_load(mt, 94849009414144, NULL);
mtree_destroy(mt);
mt_set_non_kernel(99);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set21, ARRAY_SIZE(set21));
rcu_barrier();
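/*
 * mt_validate() walks the entire tree and checks its invariants (slot
 * ordering, node limits, gap metadata), catching any corruption the
 * store/erase sequence above may have introduced.
 */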
mt_validate(mt);
mtree_destroy(mt);
mt_set_non_kernel(999);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set22, ARRAY_SIZE(set22));
rcu_barrier();
mt_validate(mt);
mtree_destroy(mt);
mt_set_non_kernel(99);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set23, ARRAY_SIZE(set23));
rcu_barrier();
mt_set_non_kernel(0);
mt_set_non_kernel(99);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set24, ARRAY_SIZE(set24));
rcu_barrier();
mt_set_non_kernel(0);
mtree_destroy(mt);
mt_set_non_kernel(99);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set25, ARRAY_SIZE(set25));
rcu_barrier();
mt_set_non_kernel(0);
/* Split on NULL followed by delete - causes gap issues. */
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set26, ARRAY_SIZE(set26));
rcu_barrier();
mas_empty_area_rev(&mas, 4096, 140109042671616, 409600);
/* Split on NULL followed by delete - causes gap issues. */
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set27, ARRAY_SIZE(set27));
rcu_barrier();
MT_BUG_ON(mt, 0 != mtree_load(mt, 140415537422336));
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set28, ARRAY_SIZE(set28));
rcu_barrier();
mas_empty_area_rev(&mas, 4096, 139918413357056, 2097152);
*/
mt_set_non_kernel(999);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set29, ARRAY_SIZE(set29));
rcu_barrier();
mt_set_non_kernel(0);
*/
mt_set_non_kernel(999);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set30, ARRAY_SIZE(set30));
rcu_barrier();
mt_set_non_kernel(0);
*/
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set31, ARRAY_SIZE(set31));
rcu_barrier();
mt_set_non_kernel(0);
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set32, ARRAY_SIZE(set32));
rcu_barrier();
mt_set_non_kernel(0);
/* move gap failed due to an entirely empty node. */
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set33, ARRAY_SIZE(set33));
rcu_barrier();
mas_empty_area_rev(&mas, 4096, 140583656296448, 134217728);
*/
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set34, ARRAY_SIZE(set34));
rcu_barrier();
mt_set_non_kernel(0);
/* Empty leaf at the end of a parent caused incorrect gap. */
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set35, ARRAY_SIZE(set35));
rcu_barrier();
mt_set_non_kernel(0);
/* Empty leaf at the end of a parent caused incorrect gap. */
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set36, ARRAY_SIZE(set36));
rcu_barrier();
mt_set_non_kernel(0);
mtree_destroy(mt);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set37, ARRAY_SIZE(set37));
rcu_barrier();
MT_BUG_ON(mt, 0 != mtree_load(mt, 94637033459712));
mtree_destroy(mt);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set38, ARRAY_SIZE(set38));
rcu_barrier();
MT_BUG_ON(mt, 0 != mtree_load(mt, 94637033459712));
mtree_destroy(mt);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set39, ARRAY_SIZE(set39));
rcu_barrier();
mt_validate(mt);
mtree_destroy(mt);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set40, ARRAY_SIZE(set40));
rcu_barrier();
mt_validate(mt);
mtree_destroy(mt);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set41, ARRAY_SIZE(set41));
rcu_barrier();
mt_validate(mt);
/* move gap failed due to an entirely empty node. */
mt_set_non_kernel(99);
mas_reset(&mas);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_erase2_testset(mt, set42, ARRAY_SIZE(set42));
rcu_barrier();
mas_empty_area_rev(&mas, 4096, 4052029440, 28672);
mtree_destroy(mt);
/* Test rebalance gaps */
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
mt_set_non_kernel(50);
for (i = 0; i <= 50; i++) {
val = i*10;
MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
for (i = 0; i <= 500; i++) {
val = i*10;
val2 = (i+1)*10;
MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
for (i = 0; i <= 500; i++) {
val = i*10;
val2 = (i+1)*10;
MT_BUG_ON(mt, !mt_height(mt));
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
for (i = 0; i <= 1300; i++) {
val = i*10;
val2 = (i+1)*10;
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
for (i = 0; i <= 1200; i++) {
val = i*10;
val2 = (i+1)*10;
mtree_destroy(mt);
/* seq 2000 tests are for multi-level tree gaps */
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_seq(mt, 2000, false);
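/*
 * check_seq() presumably stores entries one index at a time up to the
 * given max, so 2000 insertions build a tree deep enough that the
 * erase below must propagate gap updates across several levels.
 */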
mt_set_non_kernel(1);
mtree_test_erase(mt, seq2000[0]);
/* seq 400 tests rebalancing over two levels. */
mt_set_non_kernel(99);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_seq(mt, 400, false);
mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
mt_set_non_kernel(0);
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
check_seq(mt, 400, false);
mt_set_non_kernel(50);
mtree_test_store_range(mt, seq400[2], seq400[9],
MT_BUG_ON(mt, count != 74);
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
mas_reset(&mas);
count = 0;
check_seq(mt, max, false);
MT_BUG_ON(mt, count != 77);
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
mas_reset(&mas);
count = 0;
check_rev_seq(mt, max, false);
MT_BUG_ON(mt, count != 77);
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
mas_reset(&mas);
mt_zero_nr_tallocated();
mt_set_non_kernel(200);
xa_mk_value(i), GFP_KERNEL);
mt_set_non_kernel(99999);
- mtree_init(&newmt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
newmas.tree = &newmt;
mas_reset(&newmas);
mas_reset(&mas);
xa_mk_value(i), GFP_KERNEL);
mt_set_non_kernel(99999);
- mtree_init(&newmt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
newmas.tree = &newmt;
mas_reset(&newmas);
mas_set(&mas, 0);
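/*
 * Simulated forking: each iteration appears to duplicate the source
 * tree into a fresh newmt through a second maple state (newmas),
 * mirroring how an mm copies its VMA tree on fork.
 */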
for (i = 0; i < nr_fork; i++) {
mt_set_non_kernel(99999);
- mtree_init(&newmt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&newmt, MT_FLAGS_ALLOC_RANGE);
newmas.tree = &newmt;
mas_reset(&newmas);
mas_reset(&mas);
run_check_rcu(mt, &vals);
mtree_destroy(mt);
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
for (i = 0; i <= nr_entries; i++)
mtree_store_range(mt, i*10, i*10 + 5,
xa_mk_value(i), GFP_KERNEL);
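/*
 * rcu_stress() presumably pits concurrent RCU readers against a single
 * writer; the boolean selects whether the writer modifies the tree in
 * forward or reverse index order.
 */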
/* Forward writer for rcu stress */
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
rcu_stress(mt, true);
mtree_destroy(mt);
/* Reverse writer for rcu stress */
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
rcu_stress(mt, false);
mtree_destroy(mt);
/* Slow reader test with spanning store. */
- mtree_init(mt, MAPLE_ALLOC_RANGE);
+ mt_init_flags(mt, MT_FLAGS_ALLOC_RANGE);
for (i = 0; i <= nr_entries; i++)
mtree_store_range(mt, i*10, i*10 + 5,
xa_mk_value(i), GFP_KERNEL);
#if defined(BENCH_SLOT_STORE)
#define BENCH
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
bench_slot_store(&tree);
mtree_destroy(&tree);
goto skip;
#endif
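/*
 * Each BENCH_* case follows the same pattern: build a gap-tracking
 * tree, run one microbenchmark against it, destroy it, and goto skip
 * to bypass the functional tests.
 */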
#if defined(BENCH_NODE_STORE)
#define BENCH
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
bench_node_store(&tree);
mtree_destroy(&tree);
goto skip;
#endif
#if defined(BENCH_AWALK)
#define BENCH
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
bench_awalk(&tree);
mtree_destroy(&tree);
goto skip;
#endif
#if defined(BENCH_WALK)
#define BENCH
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
bench_walk(&tree);
mtree_destroy(&tree);
goto skip;
#endif
#if defined(BENCH_FORK)
#define BENCH
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
bench_forking(&tree);
mtree_destroy(&tree);
goto skip;
test_kmem_cache_bulk();
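/*
 * Functional tests: each check_* case below runs against a freshly
 * initialized tree. A flags value of 0 creates a plain tree, while
 * MT_FLAGS_ALLOC_RANGE enables the gap tracking required by the
 * allocation-range searches.
 */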
- mtree_init(&tree, 0);
+ mt_init_flags(&tree, 0);
check_new_node(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, 0);
+ mt_init_flags(&tree, 0);
check_dfs_preorder(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_forking(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_mas_store_gfp(&tree);
mtree_destroy(&tree);
/* Test ranges (store and insert) */
- mtree_init(&tree, 0);
+ mt_init_flags(&tree, 0);
check_ranges(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_alloc_range(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_alloc_rev_range(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, 0);
+ mt_init_flags(&tree, 0);
check_load(&tree, set[0], NULL); /* See if 5015 -> NULL */
mtree_destroy(&tree);
/* Try to insert, insert a dup, and load back what was inserted. */
- mtree_init(&tree, 0);
+ mt_init_flags(&tree, 0);
check_insert(&tree, set[0], &tree); /* Insert 5015 */
check_dup_insert(&tree, set[0], &tree); /* Insert 5015 again */
check_load(&tree, set[0], &tree); /* See if 5015 -> &tree */
/* Clear out tree */
mtree_destroy(&tree);
- mtree_init(&tree, 0);
+ mt_init_flags(&tree, 0);
/* Test inserting into a NULL hole. */
check_insert(&tree, set[5], ptr); /* insert 1001 -> ptr */
check_insert(&tree, set[7], &tree); /* insert 1003 -> &tree */
/* Clear out the tree */
mtree_destroy(&tree);
- mtree_init(&tree, 0);
+ mt_init_flags(&tree, 0);
check_erase_testset(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, 0);
+ mt_init_flags(&tree, 0);
/*
* set[] = {5015, 5014, 5017, 25, 1000,
* 1001, 1002, 1003, 1005, 0,
mtree_destroy(&tree);
check_nomem(&tree);
- mtree_init(&tree, 0);
+ mt_init_flags(&tree, 0);
check_seq(&tree, 16, false);
mtree_destroy(&tree);
- mtree_init(&tree, 0);
+ mt_init_flags(&tree, 0);
check_seq(&tree, 1000, true);
mtree_destroy(&tree);
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_rev_seq(&tree, 1000, true);
mtree_destroy(&tree);
check_upper_bound_split(&tree);
check_mid_split(&tree);
- mtree_init(&tree, 0);
+ mt_init_flags(&tree, 0);
check_next_entry(&tree);
check_find(&tree);
check_find_2(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_prev_entry(&tree);
- mtree_init(&tree, 0);
+ mt_init_flags(&tree, 0);
check_erase2_sets(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_gap_combining(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_node_overwrite(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
next_prev_test(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_rcu_simulated(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_rcu_threaded(&tree);
mtree_destroy(&tree);
- mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ mt_init_flags(&tree, MT_FLAGS_ALLOC_RANGE);
check_spanning_relatives(&tree);
mtree_destroy(&tree);