From 9ed69444fa9041832ddff6e4a525314dc8747ed1 Mon Sep 17 00:00:00 2001
From: "Liam R. Howlett"
Date: Mon, 24 Feb 2020 21:36:03 -0500
Subject: [PATCH] mm/mmap: Don't adjust next for maple tree operations.

As the maple tree store operation will alter the start location of the
next node when they overlap, there is no need to erase and store the
same VMA.  In fact, since the start location is now occupied by the
current VMA, the erase operation would remove the wrong entry.

Signed-off-by: Liam R. Howlett
---
 mm/mmap.c | 59 ++++++++++++++++++-----------------------------------------
 1 file changed, 18 insertions(+), 41 deletions(-)

diff --git a/mm/mmap.c b/mm/mmap.c
index 3c5a9eb9a7ad..0a741e5bd55e 100644
--- a/mm/mmap.c
+++ b/mm/mmap.c
@@ -434,6 +434,19 @@ static void validate_mm_mt(struct mm_struct *mm,
 				vma->vm_next->vm_start, vma->vm_next->vm_end);
 			mt_dump(mas.tree);
+			if (vma_mt->vm_end != mas.last + 1) {
+				pr_err("vma: %px vma_mt %lu-%lu\tmt %lu-%lu\n",
+						mm, vma_mt->vm_start, vma_mt->vm_end,
+						mas.index, mas.last);
+				mt_dump(mas.tree);
+			}
+			VM_BUG_ON_MM(vma_mt->vm_end != mas.last + 1, mm);
+			if (vma_mt->vm_start != mas.index) {
+				pr_err("vma: %px vma_mt %px %lu - %lu doesn't match\n",
+						mm, vma_mt, vma_mt->vm_start, vma_mt->vm_end);
+				mt_dump(mas.tree);
+			}
+			VM_BUG_ON_MM(vma_mt->vm_start != mas.index, mm);
 		}
 		VM_BUG_ON(vma != vma_mt);
 		vma = vma->vm_next;
@@ -464,13 +477,6 @@ static void validate_mm(struct mm_struct *mm)
 	unsigned long highest_address = 0;
 	struct vm_area_struct *vma = mm->mmap;
-	struct maple_tree *mt = &mm->mm_mt;
-	MA_STATE(mas, mt, 0, 0);
-	struct vm_area_struct *entry = NULL;
-	unsigned long mt_highest_address = 0;
-	int mt_i = 0;
-
-
 	while (vma) {
 		struct anon_vma *anon_vma = vma->anon_vma;
 		struct anon_vma_chain *avc;
@@ -489,37 +495,6 @@ static void validate_mm(struct mm_struct *mm)
 		vma = vma->vm_next;
 		i++;
 	}
-	rcu_read_lock();
-	mas_for_each(&mas, entry, ULONG_MAX) {
-		if (mas_retry(&mas, entry))
-			continue;
-
-		VM_BUG_ON_MM(!entry, mm);
-
-		if (entry->vm_end != mas.last + 1) {
-			printk("vma: %px entry %lu-%lu\tmt %lu-%lu\n",
-					mm, entry->vm_start, entry->vm_end,
-					mas.index, mas.last);
-			mt_dump(mas.tree);
-		}
-		VM_BUG_ON_MM(entry->vm_end != mas.last + 1, mm);
-		if (entry->vm_start != mas.index) {
-			printk("vma: %px entry %px %lu - %lu doesn't match\n",
-					mm, entry, entry->vm_start, entry->vm_end);
-			mt_dump(mas.tree);
-		}
-		VM_BUG_ON_MM(entry->vm_start != mas.index, mm);
-		mt_highest_address = vm_end_gap(entry);
-		mt_i++;
-	}
-	rcu_read_unlock();
-	if (i != mt_i) {
-		pr_emerg("%s: %d != %d\n", __func__, i, mt_i);
-		mt_dump(mas.tree);
-	}
-	VM_BUG_ON_MM(i != mt_i, mm);
-	VM_BUG_ON_MM(mt_highest_address != highest_address, mm);
-
 	if (i != mm->map_count) {
 		pr_emerg("map_count %d vm_next %d\n", mm->map_count, i);
 		bug = 1;
@@ -936,6 +911,7 @@ int __vma_adjust(struct vm_area_struct *vma, unsigned long start,
 	validate_mm(mm);
 	validate_mm_mt(mm, NULL);
+	//printk("%s %px %lu %lu\n", __func__, vma, start, end);
 	if (next && !insert) {
 		struct vm_area_struct *exporter = NULL, *importer = NULL;
@@ -1084,7 +1060,8 @@ again:
 	if (adjust_next) {
 		next->vm_start += adjust_next;
 		next->vm_pgoff += adjust_next >> PAGE_SHIFT;
-		__vma_mt_store(mm, next);
+		// maple tree erase/store is unnecessary as the adjusting of
+		// the vma would have overwritten the area.
 	}
 	if (file) {
@@ -2359,8 +2336,8 @@ found_highest:
 	VM_BUG_ON(gap_end < gap_start);
 	if (gap != gap_end) {
-		pr_err("%s: Gap was found: mt %lu gap_end %lu\n", __func__, gap,
-				gap_end);
+		pr_err("%s: %px Gap was found: mt %lu gap_end %lu\n", __func__,
+				mm, gap, gap_end);
 		pr_err("window was %lu - %lu size %lu\n", info->high_limit,
 				info->low_limit, length);
 		pr_err("mas.min %lu max %lu mas.last %lu\n", mas.min, mas.max,
-- 
2.50.1