/*** Global kva allocator ***/
 
+#define DEBUG_AUGMENT_PROPAGATE_CHECK 0
+
 #define VM_LAZY_FREE   0x02
 #define VM_VM_AREA     0x04
 
        }
 }
 
+#if DEBUG_AUGMENT_PROPAGATE_CHECK
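+/*
+ * Verify that the cached subtree_max_size of every node in the
+ * augmented tree matches the size of a vmap_area reachable in
+ * that node's subtree; report any mismatch via pr_emerg().
+ */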
+static void
+augment_tree_propagate_check(struct rb_node *n)
+{
+       struct vmap_area *va;
+       struct rb_node *node;
+       unsigned long size;
+       bool found = false;
+
+       if (n == NULL)
+               return;
+
+       va = rb_entry(n, struct vmap_area, rb_node);
+       size = va->subtree_max_size;
+       node = n;
+
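+       /*
+        * Walk down the branch that is expected to hold the cached
+        * maximum: follow the left child while its subtree carries
+        * "size", otherwise accept the current node if its own size
+        * matches, else continue to the right child.
+        */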
+       while (node) {
+               va = rb_entry(node, struct vmap_area, rb_node);
+
+               if (get_subtree_max_size(node->rb_left) == size) {
+                       node = node->rb_left;
+               } else {
+                       if (va_size(va) == size) {
+                               found = true;
+                               break;
+                       }
+
+                       node = node->rb_right;
+               }
+       }
+
+       if (!found) {
+               va = rb_entry(n, struct vmap_area, rb_node);
+               pr_emerg("tree is corrupted: %lu, %lu\n",
+                       va_size(va), va->subtree_max_size);
+       }
+
+       augment_tree_propagate_check(n->rb_left);
+       augment_tree_propagate_check(n->rb_right);
+}
+#endif
+
 /*
  * This function populates subtree_max_size from bottom to upper
  * levels starting from VA point. The propagation must be done
                va->subtree_max_size = new_va_sub_max_size;
                node = rb_parent(&va->rb_node);
        }
+
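+       /*
+        * When the debug check is compiled in, validate the whole
+        * augmented tree after each propagation.
+        */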
+#if DEBUG_AUGMENT_PROPAGATE_CHECK
+       augment_tree_propagate_check(free_vmap_area_root.rb_node);
+#endif
 }
 
 static void