int mas_preallocate(struct ma_state *mas, void *entry, gfp_t gfp)
{
int ret;
+ int count = 1;
+ MA_WR_STATE(wr_mas, mas, entry);
+
+ wr_mas.content = mas_start(mas);
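+ /* Empty tree or root pointer: NULL stores and index 0 writes need no nodes */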
+ if (unlikely(mas_is_none(mas) || mas_is_ptr(mas))) {
+ if (!wr_mas.content && !entry)
+ count = 0;
+ else if (entry && mas->index == 0)
+ count = 0;
+
+ goto allocate;
+ }
+
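+ /* Walk to the leaf that will take the write and size the request there */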
+ if (likely(mas_wr_walk(&wr_mas))) {
+ printk("write %p to %lu-%lu\n", entry, mas->index, mas->last);
+ printk("wr_walk to %p\n", mas_mn(mas));
+ printk("wr node end %u vs %u\n", wr_mas.node_end,
+ mt_slots[wr_mas.type] - 3);
+ printk("content %p\n", wr_mas.content);
+ if (mas->index == wr_mas.r_min && mas->last == wr_mas.r_max) {
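+ /* Exact fit: the write replaces a single slot, no new nodes needed */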
+ count = 0;
+ goto allocate;
+ } else if ((wr_mas.node_end - wr_mas.offset_end + mas->offset <=
+ mt_slots[wr_mas.type] - 3) &&
+ (wr_mas.node_end - wr_mas.offset_end + mas->offset >
+ mt_min_slots[wr_mas.type])) {
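+ /* The result stays within this leaf's slot limits: one node is enough */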
+ count = 1;
+ goto allocate;
+ } else if (!wr_mas.content && !entry) {
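+ /* Overwriting NULL with NULL changes nothing: no nodes needed */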
+ count = 0;
+ goto allocate;
+ }
+ }
- mas_node_count_gfp(mas, 1 + mas_mt_height(mas) * 3, gfp);
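+ /* No fast path (e.g. a spanning store): assume the worst case */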
+ mas_reset(mas);
+ count = 1 + mas_mt_height(mas) * 3;
+allocate:
+ printk("Allocated %d\n", count);
+ mas_node_count_gfp(mas, count, gfp);
mas->mas_flags |= MA_STATE_PREALLOC;
if (likely(!mas_is_err(mas)))
return 0;
for (i = 0; i <= max; i++)
mtree_test_store_range(mt, i * 10, i * 10 + 5, &i);
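+ /* Entries now sit at i * 10 through i * 10 + 5 with NULL gaps between them */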
+ printk("START\n");
+ mt_dump(mt);
+
+ /* Set up a store of ptr over the 10-20 range spanning many slots, but don't store it */
MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
allocated = mas_allocated(&mas);
MT_BUG_ON(mt, allocated != 0);
+ /* Set up the same store of ptr over 10-20 a second time, but don't store it */
MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated != 0);
+ /*
+ * Set up the same store of ptr over 10-20 twice, then free a node and
+ * re-run the same preallocation.
+ */
MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
allocated = mas_allocated(&mas);
MT_BUG_ON(mt, allocated != 0);
+ printk("\n");
MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
allocated = mas_allocated(&mas);
MT_BUG_ON(mt, allocated != 0);
ma_free_rcu(mn);
MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
mas_store_prealloc(&mas, ptr);
MT_BUG_ON(mt, mas_allocated(&mas) != 0);
+ /* Store ptr over 10-20 requires 0 allocations */
MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
- MT_BUG_ON(mt, allocated == 0);
- MT_BUG_ON(mt, allocated != 1 + height * 3);
+ MT_BUG_ON(mt, allocated != 0);
mas_store_prealloc(&mas, ptr);
- MT_BUG_ON(mt, mas_allocated(&mas) != 0);
+
+ /* Store ptr over NULL 26-29 requires 0 allocations */
+ mas_set_range(&mas, 26, 29);
MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
- MT_BUG_ON(mt, allocated == 0);
- MT_BUG_ON(mt, allocated != 1 + height * 3);
+ MT_BUG_ON(mt, allocated != 0);
mas_store_prealloc(&mas, ptr);
MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL) != 0);
allocated = mas_allocated(&mas);
height = mas_mt_height(&mas);
MT_BUG_ON(mt, allocated != 0);
+
+ /* No allocation needed, storing NULL to a NULL range */
+ mas_set_range(&mas, 66, 68);
+ MT_BUG_ON(mt, mas_preallocate(&mas, NULL, GFP_KERNEL & GFP_NOWAIT) != 0);
+ allocated = mas_allocated(&mas);
+ MT_BUG_ON(mt, allocated != 0);
+
+ /* Storing ptr into part of a NULL range requires one node allocation */
+ mt_set_non_kernel(1);
+ mas_set_range(&mas, 67, 68);
+ MT_BUG_ON(mt, mas_preallocate(&mas, ptr, GFP_KERNEL & GFP_NOWAIT) != 0);
+ allocated = mas_allocated(&mas);
+ MT_BUG_ON(mt, allocated != 1);
}
static noinline void check_spanning_write(struct maple_tree *mt)