spin_lock(&glob->lru_lock);
        }
 
-       if (bo->mem.mm_node) {
-               ttm_bo_mem_put(bo, &bo->mem);
-       }
+       ttm_bo_mem_put_locked(bo, &bo->mem);
 
        atomic_set(&bo->reserved, 0);
        wake_up_all(&bo->event_queue);
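
This hunk is in the cleanup path of drivers/gpu/drm/ttm/ttm_bo.c, which runs with glob->lru_lock already held. With the default range manager wired up below, put_node retakes that same lock, so freeing the node through plain ttm_bo_mem_put() here would self-deadlock on a non-recursive spinlock; hence the switch to the new _locked variant. The mem.mm_node check moves into the helper, so the call becomes unconditional. A sketch of the bad call chain, illustration only and not part of the patch:

    spin_lock(&glob->lru_lock);        /* already held by this path */
    ttm_bo_mem_put(bo, &bo->mem);
            /* -> (*man->func->put_node)(man, mem)
             * -> ttm_bo_man_put_node()
             * -> spin_lock(&glob->lru_lock)  <-- second acquisition of a
             *                                    non-recursive spinlock
             */
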
 }
 EXPORT_SYMBOL(ttm_bo_mem_put);
 
+void ttm_bo_mem_put_locked(struct ttm_buffer_object *bo,
+                           struct ttm_mem_reg *mem)
+{
+       struct ttm_mem_type_manager *man = &bo->bdev->man[mem->mem_type];
+
+       if (mem->mm_node)
+               (*man->func->put_node_locked)(man, mem);
+}
+EXPORT_SYMBOL(ttm_bo_mem_put_locked);
+
 /**
  * Repeatedly evict memory from the LRU for @mem_type until we create enough
  * space, or we've evicted everything and there isn't enough space.
 
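
For reference, ttm_bo_mem_put() directly above looks roughly like this; the new export mirrors it and differs only in which manager callback it dispatches to:

    void ttm_bo_mem_put(struct ttm_buffer_object *bo, struct ttm_mem_reg *mem)
    {
            struct ttm_mem_type_manager *man = &bo->bdev->man[mem->mem_type];

            if (mem->mm_node)
                    (*man->func->put_node)(man, mem);
    }
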
        }
 }
 
+static void ttm_bo_man_put_node_locked(struct ttm_mem_type_manager *man,
+                                      struct ttm_mem_reg *mem)
+{
+       if (mem->mm_node) {
+               drm_mm_put_block(mem->mm_node);
+               mem->mm_node = NULL;
+       }
+}
+
 static int ttm_bo_man_init(struct ttm_mem_type_manager *man,
                           unsigned long p_size)
 {
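
These two hunks are in drivers/gpu/drm/ttm/ttm_bo_manager.c, the default range manager. For contrast with the new ttm_bo_man_put_node_locked() above, the existing ttm_bo_man_put_node() wraps drm_mm_put_block() in the global LRU lock, roughly:

    static void ttm_bo_man_put_node(struct ttm_mem_type_manager *man,
                                    struct ttm_mem_reg *mem)
    {
            struct ttm_bo_global *glob = man->bdev->glob;

            if (mem->mm_node) {
                    spin_lock(&glob->lru_lock);
                    drm_mm_put_block(mem->mm_node);
                    spin_unlock(&glob->lru_lock);
                    mem->mm_node = NULL;
            }
    }

The _locked variant is the same operation minus the lock/unlock pair, for callers that already hold glob->lru_lock.
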
        ttm_bo_man_takedown,
        ttm_bo_man_get_node,
        ttm_bo_man_put_node,
+       ttm_bo_man_put_node_locked,
        ttm_bo_man_debug
 };
 EXPORT_SYMBOL(ttm_bo_manager_func);
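
Because the ttm_mem_type_manager_func table is initialized positionally, any driver that plugs in its own memory manager must grow a matching entry in the same slot. A hypothetical driver-side table, names invented for illustration:

    const struct ttm_mem_type_manager_func my_vram_manager_func = {
            my_vram_init,
            my_vram_takedown,
            my_vram_get_node,
            my_vram_put_node,
            my_vram_put_node_locked,  /* new slot; must not take lru_lock */
            my_vram_debug
    };
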
 
                         struct ttm_mem_reg *mem);
        void (*put_node)(struct ttm_mem_type_manager *man,
                         struct ttm_mem_reg *mem);
+       void (*put_node_locked)(struct ttm_mem_type_manager *man,
+                               struct ttm_mem_reg *mem);
        void (*debug)(struct ttm_mem_type_manager *man, const char *prefix);
 };
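
The header hunks are in include/drm/ttm/ttm_bo_driver.h. The new hook carries a locking contract the structure itself does not spell out: it is invoked with the global LRU lock already held, so implementations must not take that lock again. A suggested comment for the field, wording ours:

    /*
     * @put_node_locked: as @put_node, but called with
     * ttm_bo_global::lru_lock held; must not acquire it again.
     */
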
 
 
 extern void ttm_bo_mem_put(struct ttm_buffer_object *bo,
                           struct ttm_mem_reg *mem);
+extern void ttm_bo_mem_put_locked(struct ttm_buffer_object *bo,
+                                 struct ttm_mem_reg *mem);
 
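Callers pick between the two exports based on whether they already hold the LRU lock; a minimal sketch, hypothetical helper for illustration only:

    /* Hypothetical helper, not part of the patch. */
    static void bo_release_mem(struct ttm_buffer_object *bo, bool lru_locked)
    {
            if (lru_locked)
                    ttm_bo_mem_put_locked(bo, &bo->mem); /* lru_lock held */
            else
                    ttm_bo_mem_put(bo, &bo->mem);        /* takes lru_lock */
    }
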
 /**
  * ttm_bo_wait_for_cpu