for (i = 0; i < ARRAY_SIZE(holes); i += 3) {
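+		/* holes[] values are bytes; >> 12 converts them to 4K page numbers. */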
#if DEBUG_ALLOC_RANGE
- pr_debug("\tGet empty %lu-%lu size %lu\n", min >> 12,
- holes[i+1] >> 12, holes[i+2] >> 12);
+ pr_debug("\tGet empty %lu-%lu size %lu (%lx-%lx)\n", min >> 12,
+ holes[i+1] >> 12, holes[i+2] >> 12,
+ min, holes[i+1]);
#endif
	MT_BUG_ON(mt, mas_get_empty_area(&mas, min >> 12,
				holes[i+1] >> 12,
				holes[i+2] >> 12));
	}
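+	/* req_range[] entries are quintets:
+	 * {start, end, size, expected address, expected return}.
+	 */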
for (i = 0; i < req_range_cnt; i += 5) {
#if DEBUG_ALLOC_RANGE
- pr_debug("\tTest %d: %lu-%lu size %lu expected %lu\n", i/5,
- req_range[i] >> 12,
- req_range[i + 1] >> 12,
- req_range[i + 2] >> 12,
- req_range[i + 3] >> 12);
+ pr_debug("\tTest %d: %lu-%lu size %lu expected %lu (%lu-%lu)\n",
+ i/5, req_range[i] >> 12, req_range[i + 1] >> 12,
+ req_range[i + 2] >> 12, req_range[i + 3] >> 12,
+ req_range[i], req_range[i+1]);
#endif
	check_mtree_alloc_range(mt,
			req_range[i]   >> 12, // start
			req_range[i+1] >> 12, // end
			req_range[i+2] >> 12, // size
			req_range[i+3] >> 12, // expected address
			req_range[i+4],       // expected return
			xa_mk_value(req_range[i] >> 12)); // pointer
mt_validate(mt);
+#if DEBUG_ALLOC_RANGE
+ mt_dump(mt);
+#endif
}
mtree_destroy(mt);
{
struct maple_enode *mn1, *mn2;
void *entry;
- unsigned long index = 82;
+
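+	/* Node capacity changes where nodes split, so each configuration
+	 * needs its own erase/search targets to recreate the same gap and
+	 * rebalance scenarios.
+	 */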
+#if defined(NODE256)
+ unsigned long seq100[] = {
+ /* 0-5 */
+ 78, 79, 80,
+ 50, 100, 2,
+
+ /* 6-12 */
+ 44, 45, 46, 43,
+ 20, 50, 3,
+
+		/* 13-20 */
+ 80, 81, 82,
+ 76, 2, 79, 85, 4,
+ };
+ unsigned long seq2000[] = {
+ 1152, 1151,
+ 1100, 1200, 2,
+ };
+ unsigned long seq400[] = {
+ 286, 318,
+ 256, 260, 266, 270, 275, 280, 290, 398,
+ 286, 310,
+ };
+#else
+	// Seq test 100 data
+	unsigned long seq100[] = {
+		/* 0-5 */
+		82, 83, 84,
+		50, 100, 2,
+
+		/* 6-12 */
+		38, 39, 40, 37,
+		20, 50, 3,
+
+		/* 13-20 */
+		79, 80, 81,
+		76, 2, 78, 85, 4,
+	};
+
+	// Seq test 2000 data
+	unsigned long seq2000[] = {
+		1792, 1791,
+		1700, 1800, 2,
+	};
+
+	// Seq test 400 data
+	unsigned long seq400[] = {
+		376, 391,
+		352, 356, 360, 364, 378, 384, 388, 398,
+		374, 375,
+	};
+#endif
+
+ unsigned long index = seq100[0];
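+	/* MA_STATE declares a maple state on mt spanning [index, index]. */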
MA_STATE(mas, mt, index, index);
check_seq(mt, 100, false); // create 100 singletons.
mt_set_non_kernel(1);
- mtree_test_erase(mt, 83);
- check_load(mt, 83, NULL);
- mtree_test_erase(mt, 84);
- check_load(mt, 84, NULL);
+ mtree_test_erase(mt, seq100[2]);
+ check_load(mt, seq100[2], NULL);
+ mtree_test_erase(mt, seq100[1]);
+ check_load(mt, seq100[1], NULL);
rcu_read_lock();
entry = mas_find(&mas, ULONG_MAX);
mn2 = mas.node;
MT_BUG_ON(mt, mn1 == mn2); // test the test.
- /* At this point, there is a gap of 2 at index + 1 between 50-100.
- * Search for the gap.
+ /* At this point, there is a gap of 2 at index + 1 between seq100[3] and
+ * seq100[4]. Search for the gap.
*/
mt_set_non_kernel(1);
mas_reset(&mas);
- MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, 50, 100, 2));
+ MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, seq100[3], seq100[4],
+ seq100[5]));
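+	/* The only size-2 gap in the searched range starts at index + 1. */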
MT_BUG_ON(mt, mas.index != index + 1);
rcu_read_unlock();
- mtree_test_erase(mt, 38);
- check_load(mt, 38, NULL);
- mtree_test_erase(mt, 39);
- check_load(mt, 39, NULL);
- mtree_test_erase(mt, 40);
- index = 37;
+ mtree_test_erase(mt, seq100[6]);
+ check_load(mt, seq100[6], NULL);
+ mtree_test_erase(mt, seq100[7]);
+ check_load(mt, seq100[7], NULL);
+ mtree_test_erase(mt, seq100[8]);
+ index = seq100[9];
rcu_read_lock();
mas.index = index;
mn1 = mas.node;
entry = mas_next(&mas, ULONG_MAX);
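+	/* seq100[6..8] span index + 1 through index + 3 and were erased,
+	 * so the next entry is at index + 4.
+	 */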
MT_BUG_ON(mt, entry != xa_mk_value(index + 4));
- mas_next(&mas, ULONG_MAX); // go to the next node.
+ mas_next(&mas, ULONG_MAX); // go to the next entry.
mn2 = mas.node;
- MT_BUG_ON(mt, mn1 == mn2);
+ MT_BUG_ON(mt, mn1 == mn2); // test the next entry is in the next node.
-	/* At this point, there is a gap of 3 at 38. Find it by searching 20 -
-	 * 50 for size 3.
+	/* At this point, there is a gap of 3 at seq100[6]. Find it by
+	 * searching seq100[10] - seq100[11] for size seq100[12].
	 */
mas_reset(&mas);
- MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, 20, 50, 3));
- MT_BUG_ON(mt, mas.index != 38);
+ MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, seq100[10], seq100[11],
+ seq100[12]));
+ MT_BUG_ON(mt, mas.index != seq100[6]);
rcu_read_unlock();
mt_set_non_kernel(1);
- mtree_store(mt, 79, NULL, GFP_KERNEL);
- check_load(mt, 79, NULL);
- check_load(mt, 80, xa_mk_value(80));
- mtree_store(mt, 80, NULL, GFP_KERNEL);
- check_load(mt, 79, NULL);
- check_load(mt, 80, NULL);
+ mtree_store(mt, seq100[13], NULL, GFP_KERNEL);
+ check_load(mt, seq100[13], NULL);
+ check_load(mt, seq100[14], xa_mk_value(seq100[14]));
+ mtree_store(mt, seq100[14], NULL, GFP_KERNEL);
+ check_load(mt, seq100[13], NULL);
+ check_load(mt, seq100[14], NULL);
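+	/* The NULL stores leave a size-2 gap starting at seq100[13]. */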
mas_reset(&mas);
rcu_read_lock();
- MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, 76, 81, 2));
- MT_BUG_ON(mt, mas.index != 79);
+ MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, seq100[16], seq100[15],
+ seq100[17]));
+ MT_BUG_ON(mt, mas.index != seq100[13]);
mt_validate(mt);
rcu_read_unlock();
-	// Test retry entry in the start of a gap.
+	// DEPRECATED (retries no longer exist): test retry entry at the start of a gap.
mt_set_non_kernel(2);
- mtree_test_store_range(mt, 78, 80, NULL);
- mtree_test_erase(mt, 81);
+ mtree_test_store_range(mt, seq100[18], seq100[14], NULL);
+ mtree_test_erase(mt, seq100[15]);
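+	/* The range store and erase open a size-4 gap starting at seq100[18]. */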
mas_reset(&mas);
rcu_read_lock();
- MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, 76, 85, 4));
+ MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, seq100[16], seq100[19],
+ seq100[20]));
rcu_read_unlock();
- MT_BUG_ON(mt, mas.index != 78);
+ MT_BUG_ON(mt, mas.index != seq100[18]);
mt_validate(mt);
mtree_destroy(mt);
+ /* seq 2000 tests are for multi-level tree gaps */
mtree_init(mt, MAPLE_ALLOC_RANGE);
check_seq(mt, 2000, false);
mt_set_non_kernel(1);
- mtree_test_erase(mt, 1792);
- mtree_test_erase(mt, 1791);
+ mtree_test_erase(mt, seq2000[0]);
+ mtree_test_erase(mt, seq2000[1]);
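+	/* Two adjacent erases leave a size-2 gap starting at seq2000[1]. */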
mt_set_non_kernel(2);
mas_reset(&mas);
rcu_read_lock();
- MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, 1700, 1800, 2));
- MT_BUG_ON(mt, mas.index != 1791);
+ MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, seq2000[2], seq2000[3],
+ seq2000[4]));
+ MT_BUG_ON(mt, mas.index != seq2000[1]);
rcu_read_unlock();
mt_validate(mt);
mtree_destroy(mt);
+ /* seq 400 tests rebalancing over two levels. */
mt_set_non_kernel(99);
mtree_init(mt, MAPLE_ALLOC_RANGE);
check_seq(mt, 400, false);
- mtree_test_store_range(mt, 376, 391, NULL);
+ mtree_test_store_range(mt, seq400[0], seq400[1], NULL);
mt_set_non_kernel(0);
mtree_destroy(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
check_seq(mt, 400, false);
mt_set_non_kernel(50);
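+	/* Overlapping stores that all end at seq400[9] repeatedly truncate
+	 * the earlier ranges, forcing rebalancing across two levels.
+	 */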
- mtree_test_store_range(mt, 352, 398, xa_mk_value(352));
- mtree_test_store_range(mt, 356, 398, xa_mk_value(356));
- mtree_test_store_range(mt, 360, 398, xa_mk_value(360));
- mtree_test_store_range(mt, 364, 398, xa_mk_value(364));
- mtree_test_store_range(mt, 376, 398, xa_mk_value(376));
- mtree_test_store_range(mt, 378, 398, xa_mk_value(378));
- mtree_test_store_range(mt, 384, 398, xa_mk_value(384));
- mtree_test_store_range(mt, 388, 398, xa_mk_value(388));
- mtree_test_store_range(mt, 374, 375, xa_mk_value(374));
+ mtree_test_store_range(mt, seq400[2], seq400[9],
+ xa_mk_value(seq400[2]));
+ mtree_test_store_range(mt, seq400[3], seq400[9],
+ xa_mk_value(seq400[3]));
+ mtree_test_store_range(mt, seq400[4], seq400[9],
+ xa_mk_value(seq400[4]));
+ mtree_test_store_range(mt, seq400[5], seq400[9],
+ xa_mk_value(seq400[5]));
+ mtree_test_store_range(mt, seq400[0], seq400[9],
+ xa_mk_value(seq400[0]));
+ mtree_test_store_range(mt, seq400[6], seq400[9],
+ xa_mk_value(seq400[6]));
+ mtree_test_store_range(mt, seq400[7], seq400[9],
+ xa_mk_value(seq400[7]));
+ mtree_test_store_range(mt, seq400[8], seq400[9],
+ xa_mk_value(seq400[8]));
+ mtree_test_store_range(mt, seq400[10], seq400[11],
+ xa_mk_value(seq400[10]));
mt_set_non_kernel(0);
mtree_destroy(mt);
}