mtree_destroy(&newmt);
}
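+
+/*
+ * check_mas_store_gfp() - Copy every range from an existing tree into a
+ * new tree with mas_store_gfp(), then validate the copy.
+ */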
+static noinline void check_mas_store_gfp(struct maple_tree *mt)
+{
+ struct maple_tree newmt;
+ int i, nr_entries = 135;
+ void *val;
+ MA_STATE(mas, mt, 0, 0);
+ MA_STATE(newmas, mt, 0, 0);
+
+ for (i = 0; i <= nr_entries; i++)
+ mtree_store_range(mt, i*10, i*10 + 5,
+ xa_mk_value(i), GFP_KERNEL);
+
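+ /*
+  * mt_set_non_kernel() is a test-harness knob: it allows that many
+  * allocations to succeed outside of normal kernel context. 99999 is
+  * effectively unlimited for this copy.
+  */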
+ mt_set_non_kernel(99999);
+ mtree_init(&newmt, MAPLE_ALLOC_RANGE);
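+ /*
+  * Walking the source tree requires the RCU read lock, and
+  * mas_store_gfp() expects the destination tree's lock to be held;
+  * it drops and retakes that lock itself when it needs to allocate.
+  */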
+ newmas.tree = &newmt;
+ rcu_read_lock();
+ mas_lock(&newmas);
+ mas_reset(&newmas);
+ mas_set(&mas, 0);
+ mas_for_each(&mas, val, ULONG_MAX) {
+ newmas.index = mas.index;
+ newmas.last = mas.last;
+ mas_store_gfp(&newmas, val, GFP_KERNEL);
+ }
+ mas_unlock(&newmas);
+ rcu_read_unlock();
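+ /* Check the integrity of the new tree before tearing it down. */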
+ mt_validate(&newmt);
+ mt_set_non_kernel(0);
+ mtree_destroy(&newmt);
+}
+
#if defined(BENCH_FORK)
static noinline void bench_forking(struct maple_tree *mt)
{
check_forking(&tree);
mtree_destroy(&tree);
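+ /* Copy a populated tree through the mas_store_gfp() interface */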
+ mtree_init(&tree, MAPLE_ALLOC_RANGE);
+ check_mas_store_gfp(&tree);
+ mtree_destroy(&tree);
+
/* Test ranges (store and insert) */
mtree_init(&tree, 0);
check_ranges(&tree);