#include <trace/events/mmap.h>
#include "internal.h"
+#define CONFIG_DEBUG_MAPLE_TREE
+#define CONFIG_DEBUG_VM_RB
+extern void mt_validate(struct maple_tree *mt);
#ifndef arch_mmap_check
#define arch_mmap_check(addr, len, flags) (0)
if (mas_retry(&mas, vma_mt))
continue;
+ if (vma && vma == ignore)
+ vma = vma->vm_next;
+
if (!vma)
break;
- if (vma != vma_mt) {
+
+ if ((vma != vma_mt) ||
+ (vma->vm_start != vma_mt->vm_start) ||
+ (vma->vm_end != vma_mt->vm_end)) {
pr_emerg("mt: %px %lu - %lu\n", vma_mt,
vma_mt->vm_start, vma_mt->vm_end);
pr_emerg("rb: %px %lu - %lu\n", vma,
vma->vm_start, vma->vm_end);
+ if (ignore)
+ pr_emerg("rb_skip %px %lu - %lu\n", ignore,
+ ignore->vm_start, ignore->vm_end);
+ pr_emerg("rb->next = %px %lu - %lu\n", vma->vm_next,
+ vma->vm_next->vm_start, vma->vm_next->vm_end);
+
+ mt_dump(mas.tree);
}
VM_BUG_ON(vma != vma_mt);
- if (vma)
- vma = vma->vm_next;
+ vma = vma->vm_next;
+
}
VM_BUG_ON(vma);
rcu_read_unlock();
+ mt_validate(&mm->mm_mt);
}
#endif
static void validate_mm_rb(struct rb_root *root, struct vm_area_struct *ignore)
struct anon_vma *anon_vma = vma->anon_vma;
struct anon_vma_chain *avc;
- pr_cont("vma: %lu-%lu", vma->vm_start, vma->vm_end);
+// pr_cont("vma: %lu-%lu", vma->vm_start, vma->vm_end);
if (anon_vma) {
- pr_cont(" anon");
+// pr_cont(" anon");
anon_vma_lock_read(anon_vma);
- list_for_each_entry(avc, &vma->anon_vma_chain, same_vma)
- anon_vma_interval_tree_verify(avc);
+// list_for_each_entry(avc, &vma->anon_vma_chain, same_vma)
+// anon_vma_interval_tree_verify(avc);
anon_vma_unlock_read(anon_vma);
}
- pr_cont("\n");
+// pr_cont("\n");
highest_address = vm_end_gap(vma);
vma = vma->vm_next;
if (mas_retry(&mas, entry))
continue;
+ VM_BUG_ON_MM(!entry, mm);
+
if (entry->vm_end != mas.last + 1) {
- printk("vma: entry %lu-%lu\tmt %lu-%lu\n",
- entry->vm_start, entry->vm_end,
- mas.index, mas.last);
+ printk("vma: %px entry %lu-%lu\tmt %lu-%lu\n",
+ mm, entry->vm_start, entry->vm_end,
+ mas.index, mas.last);
mt_dump(mas.tree);
}
VM_BUG_ON_MM(entry->vm_end != mas.last + 1, mm);
+ if (entry->vm_start != mas.index) {
+ printk("vma: %px entry %px %lu - %lu doesn't match\n",
+ mm, entry, entry->vm_start, entry->vm_end);
+ mt_dump(mas.tree);
+ }
VM_BUG_ON_MM(entry->vm_start != mas.index, mm);
mt_highest_address = vm_end_gap(entry);
mt_i++;
}
static void __vma_mt_erase(struct mm_struct *mm, struct vm_area_struct *vma)
{
-	//printk("%s: mt_mod %p: ERASE, %lu, %lu,\n", __func__, mm, vma->vm_start,
-	//	vma->vm_end - 1);
+// printk("%s: mt_mod %px (%px): ERASE, %lu, %lu,\n", __func__, mm, vma,
+//	vma->vm_start, vma->vm_end - 1);
+	/* Remove the entry keyed at vm_start from mm's maple tree. */
	mtree_erase(&mm->mm_mt, vma->vm_start);
+	/* Debug-only consistency check after every tree modification. */
+	mt_validate(&mm->mm_mt);
+}
+/*
+ * __vma_mt_szero() - Clear a range in the maple tree by storing NULL.
+ * @start: first address to clear (inclusive)
+ * @end:   end of range (exclusive; the tree is indexed to end - 1)
+ */
+static void __vma_mt_szero(struct mm_struct *mm, unsigned long start,
+		unsigned long end)
+{
+// printk("%s: mt_mod %px (%px): SNULL, %lu, %lu,\n", __func__, mm, NULL,
+//	start, end - 1);
+	mtree_store_range(&mm->mm_mt, start, end - 1, NULL, GFP_KERNEL);
+	/* Match __vma_mt_erase()/__vma_mt_store(): validate after modifying. */
+	mt_validate(&mm->mm_mt);
}
static void __vma_mt_store(struct mm_struct *mm, struct vm_area_struct *vma)
{
-	//printk("%s: mt_mod %p: STORE, %lu, %lu,\n", __func__, mm, vma->vm_start,
-	//	vma->vm_end - 1);
+// printk("%s: mt_mod %px (%px): STORE, %lu, %lu,\n", __func__, mm, vma,
+//	vma->vm_start, vma->vm_end - 1);
+	/* The vma occupies tree indices [vm_start, vm_end - 1] inclusive. */
	mtree_store_range(&mm->mm_mt, vma->vm_start, vma->vm_end - 1, vma,
			GFP_KERNEL);
+	/* Debug-only consistency check after every tree modification. */
+	mt_validate(&mm->mm_mt);
}
void vma_store(struct mm_struct *mm, struct vm_area_struct *vma)
{
__vma_mt_store(mm, vma);
__vma_link_list(mm, vma, prev);
__vma_link_rb(mm, vma, rb_link, rb_parent);
- validate_mm_mt(mm, NULL);
+ //validate_mm_mt(mm, NULL);
}
static void vma_link(struct mm_struct *mm, struct vm_area_struct *vma,
mm->map_count++;
}
-<<<<<<< HEAD
-static __always_inline void __vma_unlink(struct mm_struct *mm,
-=======
-// LRH: Fixed.
static __always_inline void __vma_unlink_common(struct mm_struct *mm,
->>>>>>> 6942ca05fee78... mm: Add maple tree to init-mm,mmap, mprotect, mm_types
struct vm_area_struct *vma,
struct vm_area_struct *ignore)
{
int remove_next = 0;
validate_mm(mm);
+ validate_mm_mt(mm, NULL);
+ //printk("%s %px %lu %lu\n", __func__, vma, start, end);
if (next && !insert) {
struct vm_area_struct *exporter = NULL, *importer = NULL;
} else {
VM_WARN_ON(expand != vma);
/*
- * case 1, 6, 7, remove_next == 2 is case 6,
+ * case 1, 6, 7: remove_next == 2 is case 6,
* remove_next == 1 is case 1 or 7.
*/
remove_next = 1 + (end > next->vm_end);
}
if (start != vma->vm_start) {
+ unsigned long old_start = vma->vm_start;
vma->vm_start = start;
- if (vma->vm_start < start)
- __vma_mt_erase(mm, vma);
+ if (old_start < start)
+ __vma_mt_szero(mm, old_start, start);
start_changed = true;
}
if (end != vma->vm_end) {
- if (vma->vm_end > end)
- __vma_mt_erase(mm, vma);
+ unsigned long old_end = vma->vm_end;
vma->vm_end = end;
+ if (old_end > end)
+ __vma_mt_szero(mm, end - 1, old_end);
end_changed = true;
}
vma->vm_pgoff = pgoff;
if (adjust_next) {
-<<<<<<< HEAD
next->vm_start += adjust_next;
next->vm_pgoff += adjust_next >> PAGE_SHIFT;
-=======
- next->vm_start += adjust_next << PAGE_SHIFT;
- next->vm_pgoff += adjust_next;
__vma_mt_store(mm, next);
->>>>>>> 6942ca05fee78... mm: Add maple tree to init-mm,mmap, mprotect, mm_types
}
if (file) {
* vma_merge has merged next into vma, and needs
* us to remove next before dropping the locks.
*/
+ /* Since we have expanded over this vma, the maple tree will
+ * have overwritten by storing the value */
if (remove_next != 3)
__vma_unlink(mm, next, next);
else
* us to insert it before dropping the locks
* (it may either follow vma or precede it).
*/
+ /* maple tree store is done in the __vma_link call in this
+ * call graph */
+// printk("insert %px %lu - %lu\n", insert, insert->vm_start, insert->vm_end);
__insert_vm_struct(mm, insert);
} else {
if (start_changed)
uprobe_mmap(insert);
validate_mm(mm);
+ validate_mm_mt(mm, NULL);
return 0;
}
unsigned long gap;
MA_STATE(mas, &mm->mm_mt, 0, 0);
- validate_mm_mt(mm, NULL);
-
/* Adjust search length to account for worst case alignment overhead */
length = info->length + info->align_mask;
if (length < info->length)
vma = remove_vma(vma);
} while (vma);
vm_unacct_memory(nr_accounted);
- validate_mm(mm);
+ //validate_mm(mm);
}
/*