}
/*
- * mas_get_empty_area() - Get the lowest address within the range that is
- * sufficient for the size requested.
+ * mas_empty_area() - Get the lowest address within the range that is sufficient
+ * for the size requested.
* @mas: The maple state
* @min: The lowest value of the range
* @max: The highest value of the range
* @size: The size needed
*/
-int mas_get_empty_area(struct ma_state *mas, unsigned long min,
- unsigned long max, unsigned long size)
+int mas_empty_area(struct ma_state *mas, unsigned long min, unsigned long max,
+ unsigned long size)
{
unsigned char offset;
unsigned long *pivots;
}
/*
- * mas_get_empty_area_rev() - Get the highest address within the range that is
+ * mas_empty_area_rev() - Get the highest address within the range that is
* sufficient for the size requested.
* @mas: The maple state
* @min: The lowest value of the range
* @max: The highest value of the range
* @size: The size needed
*/
-int mas_get_empty_area_rev(struct ma_state *mas, unsigned long min,
- unsigned long max, unsigned long size)
+int mas_empty_area_rev(struct ma_state *mas, unsigned long min,
+ unsigned long max, unsigned long size)
{
mas_start(mas);
{
int ret = 0;
- ret = mas_get_empty_area_rev(mas, min, max, size);
+ ret = mas_empty_area_rev(mas, min, max, size);
if (ret)
return ret;
mtree_init(mt, MAPLE_ALLOC_RANGE);
check_erase2_testset(mt, set11, ARRAY_SIZE(set11));
rcu_barrier();
- mas_get_empty_area_rev(&mas, 12288, 140014592737280, 0x2000);
+ mas_empty_area_rev(&mas, 12288, 140014592737280, 0x2000);
MT_BUG_ON(mt, mas.index != 140014592565248);
mtree_destroy(mt);
check_erase2_testset(mt, set13, ARRAY_SIZE(set13));
mtree_erase(mt, 140373516443648);
rcu_read_lock();
- mas_get_empty_area_rev(&mas, 0, 140373518663680, 4096);
+ mas_empty_area_rev(&mas, 0, 140373518663680, 4096);
rcu_read_unlock();
mtree_destroy(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
mtree_init(mt, MAPLE_ALLOC_RANGE);
check_erase2_testset(mt, set16, ARRAY_SIZE(set16));
rcu_barrier();
- mas_get_empty_area_rev(&mas, 4096, 139921865637888, 0x6000);
+ mas_empty_area_rev(&mas, 4096, 139921865637888, 0x6000);
MT_BUG_ON(mt, mas.index != 139921865523200);
mt_set_non_kernel(0);
mtree_destroy(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
check_erase2_testset(mt, set17, ARRAY_SIZE(set17));
rcu_barrier();
- mas_get_empty_area_rev(&mas, 4096, 139953197334528, 0x1000);
+ mas_empty_area_rev(&mas, 4096, 139953197334528, 0x1000);
MT_BUG_ON(mt, mas.index != 139953197318144);
mt_set_non_kernel(0);
mtree_destroy(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
check_erase2_testset(mt, set18, ARRAY_SIZE(set18));
rcu_barrier();
- mas_get_empty_area_rev(&mas, 4096, 140222972858368, 2215936);
+ mas_empty_area_rev(&mas, 4096, 140222972858368, 2215936);
MT_BUG_ON(mt, mas.index != 140222966259712);
mt_set_non_kernel(0);
mtree_destroy(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
check_erase2_testset(mt, set26, ARRAY_SIZE(set26));
rcu_barrier();
- mas_get_empty_area_rev(&mas, 4096, 140109042671616, 409600);
+ mas_empty_area_rev(&mas, 4096, 140109042671616, 409600);
MT_BUG_ON(mt, mas.index != 140109040549888);
mt_set_non_kernel(0);
mt_validate(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
check_erase2_testset(mt, set28, ARRAY_SIZE(set28));
rcu_barrier();
- mas_get_empty_area_rev(&mas, 4096, 139918413357056, 4190208);
+ mas_empty_area_rev(&mas, 4096, 139918413357056, 4190208);
mas.index = (mas.index + 2093056 - 0) & (~2093056); // align_mask = 2093056 offset = 0
MT_BUG_ON(mt, mas.index != 139918401601536);
mt_set_non_kernel(0);
mtree_init(mt, MAPLE_ALLOC_RANGE);
check_erase2_testset(mt, set33, ARRAY_SIZE(set33));
rcu_barrier();
- mas_get_empty_area_rev(&mas, 4096, 140583656296448, 134217728);
+ mas_empty_area_rev(&mas, 4096, 140583656296448, 134217728);
MT_BUG_ON(mt, mas.index != 140582869532672);
mt_set_non_kernel(0);
mt_validate(mt);
mtree_init(mt, MAPLE_ALLOC_RANGE);
check_erase2_testset(mt, set42, ARRAY_SIZE(set42));
rcu_barrier();
- mas_get_empty_area_rev(&mas, 4096, 4052029440, 28672);
+ mas_empty_area_rev(&mas, 4096, 4052029440, 28672);
MT_BUG_ON(mt, mas.index != 4041183232);
mt_set_non_kernel(0);
mt_validate(mt);
min, holes[i+1]>>12, holes[i+2]>>12,
holes[i] >> 12);
#endif
- MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, min,
+ MT_BUG_ON(mt, mas_empty_area_rev(&mas, min,
holes[i+1] >> 12,
holes[i+2] >> 12));
#if DEBUG_REV_RANGE
holes[i+1] >> 12, holes[i+2] >> 12,
min, holes[i+1]);
#endif
- MT_BUG_ON(mt, mas_get_empty_area(&mas, min >> 12,
+ MT_BUG_ON(mt, mas_empty_area(&mas, min >> 12,
holes[i+1] >> 12,
holes[i+2] >> 12));
MT_BUG_ON(mt, mas.index != holes[i] >> 12);
*/
mt_set_non_kernel(1);
mas_reset(&mas);
- MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, seq100[3], seq100[4],
+ MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[3], seq100[4],
seq100[5]));
MT_BUG_ON(mt, mas.index != index + 1);
rcu_read_unlock();
* 50 for size 3.
*/
mas_reset(&mas);
- MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, seq100[10], seq100[11],
+ MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[10], seq100[11],
seq100[12]));
MT_BUG_ON(mt, mas.index != seq100[6]);
rcu_read_unlock();
mas_reset(&mas);
rcu_read_lock();
- MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, seq100[16], seq100[15],
+ MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[15],
seq100[17]));
MT_BUG_ON(mt, mas.index != seq100[13]);
mt_validate(mt);
mtree_test_erase(mt, seq100[15]);
mas_reset(&mas);
rcu_read_lock();
- MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, seq100[16], seq100[19],
+ MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq100[16], seq100[19],
seq100[20]));
rcu_read_unlock();
MT_BUG_ON(mt, mas.index != seq100[18]);
mt_set_non_kernel(2);
mas_reset(&mas);
rcu_read_lock();
- MT_BUG_ON(mt, mas_get_empty_area_rev(&mas, seq2000[2], seq2000[3],
+ MT_BUG_ON(mt, mas_empty_area_rev(&mas, seq2000[2], seq2000[3],
seq2000[4]));
MT_BUG_ON(mt, mas.index != seq2000[1]);
rcu_read_unlock();
mtree_store_range(mt, 1470, 1475, NULL, GFP_KERNEL);
for (i = 0; i < count; i++) {
- mas_get_empty_area_rev(&mas, 0, 2000, 10);
+ mas_empty_area_rev(&mas, 0, 2000, 10);
mas_reset(&mas);
}
}