                          || !exp_info->ops->map_dma_buf
                          || !exp_info->ops->unmap_dma_buf
                          || !exp_info->ops->release
-                         || !exp_info->ops->kmap_atomic
-                         || !exp_info->ops->kmap
+                         || !exp_info->ops->map_atomic
+                         || !exp_info->ops->map
                          || !exp_info->ops->mmap)) {
                return ERR_PTR(-EINVAL);
        }
 {
        WARN_ON(!dmabuf);
 
-       return dmabuf->ops->kmap_atomic(dmabuf, page_num);
+       return dmabuf->ops->map_atomic(dmabuf, page_num);
 }
 EXPORT_SYMBOL_GPL(dma_buf_kmap_atomic);
 
 {
        WARN_ON(!dmabuf);
 
-       if (dmabuf->ops->kunmap_atomic)
-               dmabuf->ops->kunmap_atomic(dmabuf, page_num, vaddr);
+       if (dmabuf->ops->unmap_atomic)
+               dmabuf->ops->unmap_atomic(dmabuf, page_num, vaddr);
 }
 EXPORT_SYMBOL_GPL(dma_buf_kunmap_atomic);
 
 {
        WARN_ON(!dmabuf);
 
-       return dmabuf->ops->kmap(dmabuf, page_num);
+       return dmabuf->ops->map(dmabuf, page_num);
 }
 EXPORT_SYMBOL_GPL(dma_buf_kmap);
 
 {
        WARN_ON(!dmabuf);
 
-       if (dmabuf->ops->kunmap)
-               dmabuf->ops->kunmap(dmabuf, page_num, vaddr);
+       if (dmabuf->ops->unmap)
+               dmabuf->ops->unmap(dmabuf, page_num, vaddr);
 }
 EXPORT_SYMBOL_GPL(dma_buf_kunmap);
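
Note that the exported wrappers keep their dma_buf_kmap()/dma_buf_kunmap() names; only the ops members they dispatch to are renamed. The following importer-side sketch is purely illustrative (example_read_first_page is a hypothetical helper, not code from this patch), assuming dst points at a PAGE_SIZE buffer:

#include <linux/dma-buf.h>
#include <linux/mm.h>
#include <linux/string.h>

/* Hypothetical importer helper: copy page 0 of a dma-buf into dst. */
static int example_read_first_page(struct dma_buf *dmabuf, void *dst)
{
	void *vaddr;
	int ret;

	/* Bracket CPU access as the dma-buf API requires. */
	ret = dma_buf_begin_cpu_access(dmabuf, DMA_FROM_DEVICE);
	if (ret)
		return ret;

	vaddr = dma_buf_kmap(dmabuf, 0);	/* page 0, may sleep */
	if (vaddr) {
		memcpy(dst, vaddr, PAGE_SIZE);
		dma_buf_kunmap(dmabuf, 0, vaddr);
	}

	return dma_buf_end_cpu_access(dmabuf, DMA_FROM_DEVICE);
}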
 
 
        .map_dma_buf    = armada_gem_prime_map_dma_buf,
        .unmap_dma_buf  = armada_gem_prime_unmap_dma_buf,
        .release        = drm_gem_dmabuf_release,
-       .kmap_atomic    = armada_gem_dmabuf_no_kmap,
-       .kunmap_atomic  = armada_gem_dmabuf_no_kunmap,
-       .kmap           = armada_gem_dmabuf_no_kmap,
-       .kunmap         = armada_gem_dmabuf_no_kunmap,
+       .map_atomic     = armada_gem_dmabuf_no_kmap,
+       .unmap_atomic   = armada_gem_dmabuf_no_kunmap,
+       .map            = armada_gem_dmabuf_no_kmap,
+       .unmap          = armada_gem_dmabuf_no_kunmap,
        .mmap           = armada_gem_dmabuf_mmap,
 };
 
 
        .map_dma_buf = drm_gem_map_dma_buf,
        .unmap_dma_buf = drm_gem_unmap_dma_buf,
        .release = drm_gem_dmabuf_release,
-       .kmap = drm_gem_dmabuf_kmap,
-       .kmap_atomic = drm_gem_dmabuf_kmap_atomic,
-       .kunmap = drm_gem_dmabuf_kunmap,
-       .kunmap_atomic = drm_gem_dmabuf_kunmap_atomic,
+       .map = drm_gem_dmabuf_kmap,
+       .map_atomic = drm_gem_dmabuf_kmap_atomic,
+       .unmap = drm_gem_dmabuf_kunmap,
+       .unmap_atomic = drm_gem_dmabuf_kunmap_atomic,
        .mmap = drm_gem_dmabuf_mmap,
        .vmap = drm_gem_dmabuf_vmap,
        .vunmap = drm_gem_dmabuf_vunmap,
 
        .map_dma_buf = i915_gem_map_dma_buf,
        .unmap_dma_buf = i915_gem_unmap_dma_buf,
        .release = drm_gem_dmabuf_release,
-       .kmap = i915_gem_dmabuf_kmap,
-       .kmap_atomic = i915_gem_dmabuf_kmap_atomic,
-       .kunmap = i915_gem_dmabuf_kunmap,
-       .kunmap_atomic = i915_gem_dmabuf_kunmap_atomic,
+       .map = i915_gem_dmabuf_kmap,
+       .map_atomic = i915_gem_dmabuf_kmap_atomic,
+       .unmap = i915_gem_dmabuf_kunmap,
+       .unmap_atomic = i915_gem_dmabuf_kunmap_atomic,
        .mmap = i915_gem_dmabuf_mmap,
        .vmap = i915_gem_dmabuf_vmap,
        .vunmap = i915_gem_dmabuf_vunmap,
 
        .map_dma_buf = mock_map_dma_buf,
        .unmap_dma_buf = mock_unmap_dma_buf,
        .release = mock_dmabuf_release,
-       .kmap = mock_dmabuf_kmap,
-       .kmap_atomic = mock_dmabuf_kmap_atomic,
-       .kunmap = mock_dmabuf_kunmap,
-       .kunmap_atomic = mock_dmabuf_kunmap_atomic,
+       .map = mock_dmabuf_kmap,
+       .map_atomic = mock_dmabuf_kmap_atomic,
+       .unmap = mock_dmabuf_kunmap,
+       .unmap_atomic = mock_dmabuf_kunmap_atomic,
        .mmap = mock_dmabuf_mmap,
        .vmap = mock_dmabuf_vmap,
        .vunmap = mock_dmabuf_vunmap,
 
        .release = omap_gem_dmabuf_release,
        .begin_cpu_access = omap_gem_dmabuf_begin_cpu_access,
        .end_cpu_access = omap_gem_dmabuf_end_cpu_access,
-       .kmap_atomic = omap_gem_dmabuf_kmap_atomic,
-       .kunmap_atomic = omap_gem_dmabuf_kunmap_atomic,
-       .kmap = omap_gem_dmabuf_kmap,
-       .kunmap = omap_gem_dmabuf_kunmap,
+       .map_atomic = omap_gem_dmabuf_kmap_atomic,
+       .unmap_atomic = omap_gem_dmabuf_kunmap_atomic,
+       .map = omap_gem_dmabuf_kmap,
+       .unmap = omap_gem_dmabuf_kunmap,
        .mmap = omap_gem_dmabuf_mmap,
 };
 
 
        .map_dma_buf = tegra_gem_prime_map_dma_buf,
        .unmap_dma_buf = tegra_gem_prime_unmap_dma_buf,
        .release = tegra_gem_prime_release,
-       .kmap_atomic = tegra_gem_prime_kmap_atomic,
-       .kunmap_atomic = tegra_gem_prime_kunmap_atomic,
-       .kmap = tegra_gem_prime_kmap,
-       .kunmap = tegra_gem_prime_kunmap,
+       .map_atomic = tegra_gem_prime_kmap_atomic,
+       .unmap_atomic = tegra_gem_prime_kunmap_atomic,
+       .map = tegra_gem_prime_kmap,
+       .unmap = tegra_gem_prime_kunmap,
        .mmap = tegra_gem_prime_mmap,
        .vmap = tegra_gem_prime_vmap,
        .vunmap = tegra_gem_prime_vunmap,
 
        .detach                 = udl_detach_dma_buf,
        .map_dma_buf            = udl_map_dma_buf,
        .unmap_dma_buf          = udl_unmap_dma_buf,
-       .kmap                   = udl_dmabuf_kmap,
-       .kmap_atomic            = udl_dmabuf_kmap_atomic,
-       .kunmap                 = udl_dmabuf_kunmap,
-       .kunmap_atomic          = udl_dmabuf_kunmap_atomic,
+       .map                    = udl_dmabuf_kmap,
+       .map_atomic             = udl_dmabuf_kmap_atomic,
+       .unmap                  = udl_dmabuf_kunmap,
+       .unmap_atomic           = udl_dmabuf_kunmap_atomic,
        .mmap                   = udl_dmabuf_mmap,
        .release                = drm_gem_dmabuf_release,
 };
 
        .map_dma_buf = vmw_prime_map_dma_buf,
        .unmap_dma_buf = vmw_prime_unmap_dma_buf,
        .release = NULL,
-       .kmap = vmw_prime_dmabuf_kmap,
-       .kmap_atomic = vmw_prime_dmabuf_kmap_atomic,
-       .kunmap = vmw_prime_dmabuf_kunmap,
-       .kunmap_atomic = vmw_prime_dmabuf_kunmap_atomic,
+       .map = vmw_prime_dmabuf_kmap,
+       .map_atomic = vmw_prime_dmabuf_kmap_atomic,
+       .unmap = vmw_prime_dmabuf_kunmap,
+       .unmap_atomic = vmw_prime_dmabuf_kunmap_atomic,
        .mmap = vmw_prime_dmabuf_mmap,
        .vmap = vmw_prime_dmabuf_vmap,
        .vunmap = vmw_prime_dmabuf_vunmap,
 
        .detach = vb2_dc_dmabuf_ops_detach,
        .map_dma_buf = vb2_dc_dmabuf_ops_map,
        .unmap_dma_buf = vb2_dc_dmabuf_ops_unmap,
-       .kmap = vb2_dc_dmabuf_ops_kmap,
-       .kmap_atomic = vb2_dc_dmabuf_ops_kmap,
+       .map = vb2_dc_dmabuf_ops_kmap,
+       .map_atomic = vb2_dc_dmabuf_ops_kmap,
        .vmap = vb2_dc_dmabuf_ops_vmap,
        .mmap = vb2_dc_dmabuf_ops_mmap,
        .release = vb2_dc_dmabuf_ops_release,
 
        .detach = vb2_dma_sg_dmabuf_ops_detach,
        .map_dma_buf = vb2_dma_sg_dmabuf_ops_map,
        .unmap_dma_buf = vb2_dma_sg_dmabuf_ops_unmap,
-       .kmap = vb2_dma_sg_dmabuf_ops_kmap,
-       .kmap_atomic = vb2_dma_sg_dmabuf_ops_kmap,
+       .map = vb2_dma_sg_dmabuf_ops_kmap,
+       .map_atomic = vb2_dma_sg_dmabuf_ops_kmap,
        .vmap = vb2_dma_sg_dmabuf_ops_vmap,
        .mmap = vb2_dma_sg_dmabuf_ops_mmap,
        .release = vb2_dma_sg_dmabuf_ops_release,
 
        .detach = vb2_vmalloc_dmabuf_ops_detach,
        .map_dma_buf = vb2_vmalloc_dmabuf_ops_map,
        .unmap_dma_buf = vb2_vmalloc_dmabuf_ops_unmap,
-       .kmap = vb2_vmalloc_dmabuf_ops_kmap,
-       .kmap_atomic = vb2_vmalloc_dmabuf_ops_kmap,
+       .map = vb2_vmalloc_dmabuf_ops_kmap,
+       .map_atomic = vb2_vmalloc_dmabuf_ops_kmap,
        .vmap = vb2_vmalloc_dmabuf_ops_vmap,
        .mmap = vb2_vmalloc_dmabuf_ops_mmap,
        .release = vb2_vmalloc_dmabuf_ops_release,
 
        .release = ion_dma_buf_release,
        .begin_cpu_access = ion_dma_buf_begin_cpu_access,
        .end_cpu_access = ion_dma_buf_end_cpu_access,
-       .kmap_atomic = ion_dma_buf_kmap,
-       .kunmap_atomic = ion_dma_buf_kunmap,
-       .kmap = ion_dma_buf_kmap,
-       .kunmap = ion_dma_buf_kunmap,
+       .map_atomic = ion_dma_buf_kmap,
+       .unmap_atomic = ion_dma_buf_kunmap,
+       .map = ion_dma_buf_kmap,
+       .unmap = ion_dma_buf_kunmap,
 };
 
 struct dma_buf *ion_share_dma_buf(struct ion_client *client,
 
 
 /**
  * struct dma_buf_ops - operations possible on struct dma_buf
- * @kmap_atomic: maps a page from the buffer into kernel address
- *              space, users may not block until the subsequent unmap call.
- *              This callback must not sleep.
- * @kunmap_atomic: [optional] unmaps a atomically mapped page from the buffer.
- *                This Callback must not sleep.
- * @kmap: maps a page from the buffer into kernel address space.
- * @kunmap: [optional] unmaps a page from the buffer.
+ * @map_atomic: maps a page from the buffer into kernel address
+ *             space; users may not block until the subsequent unmap call.
+ *             This callback must not sleep.
+ * @unmap_atomic: [optional] unmaps an atomically mapped page from the buffer.
+ *               This callback must not sleep.
+ * @map: maps a page from the buffer into kernel address space.
+ * @unmap: [optional] unmaps a page from the buffer.
  * @vmap: [optional] creates a virtual mapping for the buffer into kernel
  *       address space. Same restrictions as for vmap and friends apply.
  * @vunmap: [optional] unmaps a vmap from the buffer
         * to be restarted.
         */
        int (*end_cpu_access)(struct dma_buf *, enum dma_data_direction);
-       void *(*kmap_atomic)(struct dma_buf *, unsigned long);
-       void (*kunmap_atomic)(struct dma_buf *, unsigned long, void *);
-       void *(*kmap)(struct dma_buf *, unsigned long);
-       void (*kunmap)(struct dma_buf *, unsigned long, void *);
+       void *(*map_atomic)(struct dma_buf *, unsigned long);
+       void (*unmap_atomic)(struct dma_buf *, unsigned long, void *);
+       void *(*map)(struct dma_buf *, unsigned long);
+       void (*unmap)(struct dma_buf *, unsigned long, void *);
 
        /**
         * @mmap:
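
As a reference for exporters adopting the renamed members, here is a minimal, hypothetical ops sketch. struct example_buffer and all example_* functions are placeholders invented for illustration, not code from this patch; they assume the exporter keeps an array of backing pages in dmabuf->priv.

#include <linux/dma-buf.h>
#include <linux/highmem.h>

/* Hypothetical per-buffer bookkeeping; real exporters keep their own. */
struct example_buffer {
	struct page **pages;
};

static void *example_map_atomic(struct dma_buf *dmabuf, unsigned long page_num)
{
	struct example_buffer *buf = dmabuf->priv;

	return kmap_atomic(buf->pages[page_num]);	/* must not sleep */
}

static void example_unmap_atomic(struct dma_buf *dmabuf,
				 unsigned long page_num, void *vaddr)
{
	kunmap_atomic(vaddr);
}

static void *example_map(struct dma_buf *dmabuf, unsigned long page_num)
{
	struct example_buffer *buf = dmabuf->priv;

	return kmap(buf->pages[page_num]);		/* may sleep */
}

static void example_unmap(struct dma_buf *dmabuf, unsigned long page_num,
			  void *vaddr)
{
	struct example_buffer *buf = dmabuf->priv;

	kunmap(buf->pages[page_num]);
}

/*
 * Only the renamed members are shown; dma_buf_export() still requires
 * .map_dma_buf, .unmap_dma_buf, .release, .map_atomic, .map and .mmap
 * to be set, as the check at the top of this patch shows.
 */
static const struct dma_buf_ops example_dmabuf_ops = {
	.map_atomic	= example_map_atomic,
	.unmap_atomic	= example_unmap_atomic,
	.map		= example_map,
	.unmap		= example_unmap,
};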