void address_space_init_once(struct address_space *mapping)
 {
        memset(mapping, 0, sizeof(*mapping));
-       INIT_RADIX_TREE(&mapping->page_tree, GFP_ATOMIC);
+       INIT_RADIX_TREE(&mapping->page_tree, GFP_ATOMIC | __GFP_ACCOUNT);
        spin_lock_init(&mapping->tree_lock);
        init_rwsem(&mapping->i_mmap_rwsem);
        INIT_LIST_HEAD(&mapping->private_list);
 
 
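/*
 * Annotation (not part of the patch): the hunk above, in fs/inode.c, is
 * the opt-in site; the page cache tree keeps charging its nodes to the
 * allocating memory cgroup. To my understanding, INIT_RADIX_TREE in this
 * kernel era merely records the mask in the tree root, roughly as below,
 * so the __GFP_ACCOUNT bit set here is seen by every later node
 * allocation for this tree. Paraphrased sketch, not a verbatim quote:
 */
#define INIT_RADIX_TREE(root, mask)	\
do {					\
	(root)->height = 0;		\
	(root)->gfp_mask = (mask);	\
	(root)->rnode = NULL;		\
} while (0)

/* The hunk below is from radix_tree_node_alloc() in lib/radix-tree.c. */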
                /*
                 * Even if the caller has preloaded, try to allocate from the
-                * cache first for the new node to get accounted.
+                * cache first for the new node to get accounted to the memory
+                * cgroup.
                 */
                ret = kmem_cache_alloc(radix_tree_node_cachep,
-                                      gfp_mask | __GFP_ACCOUNT | __GFP_NOWARN);
+                                      gfp_mask | __GFP_NOWARN);
                if (ret)
                        goto out;
 
                kmemleak_update_trace(ret);
                goto out;
        }
-       ret = kmem_cache_alloc(radix_tree_node_cachep,
-                              gfp_mask | __GFP_ACCOUNT);
+       ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
 out:
        BUG_ON(radix_tree_is_internal_node(ret));
        return ret;
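
/*
 * Annotation (not part of the patch): the gfp_mask used in
 * radix_tree_node_alloc() above is derived from the tree root, so after
 * dropping the unconditional __GFP_ACCOUNT here, nodes are charged only
 * for trees initialized with the flag, such as page_tree above. Sketch of
 * the existing helper, assuming the layout of this era where tag bits
 * share the high bits of root->gfp_mask:
 */
static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}

/* The fragment below is from __radix_tree_preload(), same file. */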
        struct radix_tree_node *node;
        int ret = -ENOMEM;
 
+       /*
+        * Nodes preloaded by one cgroup can be used by another cgroup, so
+        * they should never be accounted to any particular memory cgroup.
+        */
+       gfp_mask &= ~__GFP_ACCOUNT;
+
        preempt_disable();
        rtp = this_cpu_ptr(&radix_tree_preloads);
        while (rtp->nr < nr) {
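
/*
 * Annotation (not part of the patch): clearing __GFP_ACCOUNT above matters
 * because the preload pool is per CPU, not per task; a node stashed by one
 * task can be consumed by a later insertion from a different task and
 * cgroup on the same CPU. Hypothetical caller pattern showing where the
 * pool is filled and drained (insert_item is made up for illustration):
 */
int insert_item(struct radix_tree_root *root, unsigned long index, void *item)
{
	int err = radix_tree_preload(GFP_KERNEL);	/* may top up this CPU's pool */

	if (err)
		return err;
	/*
	 * Preemption stays disabled until radix_tree_preload_end(); the
	 * insert may consume pool nodes that any earlier task on this CPU
	 * preloaded, hence no per-cgroup accounting of pool nodes.
	 */
	err = radix_tree_insert(root, index, item);
	radix_tree_preload_end();
	return err;
}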