void (*coherent_user_range)(unsigned long, unsigned long);
        void (*flush_kern_dcache_area)(void *, size_t);
 
+       void (*dma_map_area)(const void *, size_t, int);
+       void (*dma_unmap_area)(const void *, size_t, int);
+
        void (*dma_inv_range)(const void *, const void *);
        void (*dma_clean_range)(const void *, const void *);
        void (*dma_flush_range)(const void *, const void *);
  * is visible to DMA, or data written by DMA to system memory is
  * visible to the CPU.
  */
+#define dmac_map_area                  cpu_cache.dma_map_area
+#define dmac_unmap_area                cpu_cache.dma_unmap_area
 #define dmac_inv_range                 cpu_cache.dma_inv_range
 #define dmac_clean_range               cpu_cache.dma_clean_range
 #define dmac_flush_range               cpu_cache.dma_flush_range
  * is visible to DMA, or data written by DMA to system memory is
  * visible to the CPU.
  */
+#define dmac_map_area                  __glue(_CACHE,_dma_map_area)
+#define dmac_unmap_area                __glue(_CACHE,_dma_unmap_area)
 #define dmac_inv_range                 __glue(_CACHE,_dma_inv_range)
 #define dmac_clean_range               __glue(_CACHE,_dma_clean_range)
 #define dmac_flush_range               __glue(_CACHE,_dma_flush_range)
 
+extern void dmac_map_area(const void *, size_t, int);
+extern void dmac_unmap_area(const void *, size_t, int);
 extern void dmac_inv_range(const void *, const void *);
 extern void dmac_clean_range(const void *, const void *);
 extern void dmac_flush_range(const void *, const void *);
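
Whatever the configuration, the result is a single calling convention: a
kernel virtual start address, a byte count, and a DMA direction. A minimal
C sketch of the dispatch every implementation performs (example_dma_map_area
is a hypothetical name; the real implementations are the per-CPU assembly
routines added below):

	static void example_dma_map_area(const void *start, size_t size, int dir)
	{
		const void *end = (const char *)start + size;

		switch (dir) {
		case DMA_TO_DEVICE:		/* writeback only */
			dmac_clean_range(start, end);
			break;
		case DMA_FROM_DEVICE:		/* invalidate only */
			dmac_inv_range(start, end);
			break;
		default:			/* DMA_BIDIRECTIONAL */
			dmac_flush_range(start, end);
			break;
		}
	}

The assembly versions avoid the switch by exploiting the numeric ordering of
the direction constants; see the cmp/beq/bcs sequence in fa_dma_map_area
below.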
 
  */
 #include <linux/sched.h>
 #include <linux/mm.h>
+#include <linux/dma-mapping.h>
 #include <asm/mach/arch.h>
 #include <asm/thread_info.h>
 #include <asm/memory.h>
 #ifdef MULTI_PABORT
   DEFINE(PROCESSOR_PABT_FUNC,  offsetof(struct processor, _prefetch_abort));
 #endif
+  BLANK();
+  DEFINE(DMA_BIDIRECTIONAL,    DMA_BIDIRECTIONAL);
+  DEFINE(DMA_TO_DEVICE,        DMA_TO_DEVICE);
+  DEFINE(DMA_FROM_DEVICE,      DMA_FROM_DEVICE);
  return 0;
 }
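
asm-offsets.c is built only to generate the asm-offsets.h header, so the
DEFINE() entries above export the enum dma_data_direction values
(DMA_BIDIRECTIONAL = 0, DMA_TO_DEVICE = 1, DMA_FROM_DEVICE = 2) to assembly.
The generated header contains lines of roughly this form (the exact comment
format is an assumption):

	#define DMA_BIDIRECTIONAL 0	/* DMA_BIDIRECTIONAL */
	#define DMA_TO_DEVICE 1		/* DMA_TO_DEVICE */
	#define DMA_FROM_DEVICE 2	/* DMA_FROM_DEVICE */

which is what allows the cache routines below to use immediates such as
cmp r2, #DMA_TO_DEVICE.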
 
        mcr     p15, 0, r0, c7, c10, 4          @ drain write buffer
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(fa_dma_map_area)
+       add     r1, r1, r0                      @ end = start + size
+       cmp     r2, #DMA_TO_DEVICE              @ dir: 0=BIDIR 1=TO_DEV 2=FROM_DEV
+       beq     fa_dma_clean_range              @ TO_DEVICE: writeback only
+       bcs     fa_dma_inv_range                @ FROM_DEVICE: invalidate only
+       b       fa_dma_flush_range              @ BIDIRECTIONAL: writeback+invalidate
+ENDPROC(fa_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(fa_dma_unmap_area)
+       mov     pc, lr                          @ nothing to do
+ENDPROC(fa_dma_unmap_area)
+
        __INITDATA
 
        .type   fa_cache_fns, #object
        .long   fa_coherent_kern_range
        .long   fa_coherent_user_range
        .long   fa_flush_kern_dcache_area
+       .long   fa_dma_map_area
+       .long   fa_dma_unmap_area
        .long   fa_dma_inv_range
        .long   fa_dma_clean_range
        .long   fa_dma_flush_range
 
 ENTRY(v3_dma_clean_range)
        mov     pc, lr
 
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(v3_dma_unmap_area)
+       teq     r2, #DMA_TO_DEVICE
+       bne     v3_dma_inv_range
+       /* FALLTHROUGH */
+
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(v3_dma_map_area)
+       mov     pc, lr
+ENDPROC(v3_dma_unmap_area)
+ENDPROC(v3_dma_map_area)
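
The v3 pair inverts the usual pattern: the map side is a no-op and any work
happens at unmap, where everything except DMA_TO_DEVICE tail-calls the
invalidate routine (for DMA_TO_DEVICE, unmap falls through into map's shared
return). Note that no end address is computed before the tail-call here,
unlike v4wt below. Roughly, in C (example_* names are hypothetical):

	static void example_v3_dma_unmap_area(const void *start, size_t size, int dir)
	{
		if (dir != DMA_TO_DEVICE)	/* FROM_DEVICE or BIDIRECTIONAL */
			v3_dma_inv_range(start, (const char *)start + size);
	}

	static void example_v3_dma_map_area(const void *start, size_t size, int dir)
	{
		/* nothing to clean before the transfer on this CPU */
	}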
+
        __INITDATA
 
        .type   v3_cache_fns, #object
        .long   v3_coherent_kern_range
        .long   v3_coherent_user_range
        .long   v3_flush_kern_dcache_area
+       .long   v3_dma_map_area
+       .long   v3_dma_unmap_area
        .long   v3_dma_inv_range
        .long   v3_dma_clean_range
        .long   v3_dma_flush_range
 
 ENTRY(v4_dma_clean_range)
        mov     pc, lr
 
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(v4_dma_unmap_area)
+       teq     r2, #DMA_TO_DEVICE
+       bne     v4_dma_inv_range
+       /* FALLTHROUGH */
+
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(v4_dma_map_area)
+       mov     pc, lr
+ENDPROC(v4_dma_unmap_area)
+ENDPROC(v4_dma_map_area)
+
        __INITDATA
 
        .type   v4_cache_fns, #object
        .long   v4_coherent_kern_range
        .long   v4_coherent_user_range
        .long   v4_flush_kern_dcache_area
+       .long   v4_dma_map_area
+       .long   v4_dma_unmap_area
        .long   v4_dma_inv_range
        .long   v4_dma_clean_range
        .long   v4_dma_flush_range
 
        .globl  v4wb_dma_flush_range
        .set    v4wb_dma_flush_range, v4wb_coherent_kern_range
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(v4wb_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     v4wb_dma_clean_range
+       bcs     v4wb_dma_inv_range
+       b       v4wb_dma_flush_range
+ENDPROC(v4wb_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(v4wb_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(v4wb_dma_unmap_area)
+
        __INITDATA
 
        .type   v4wb_cache_fns, #object
        .long   v4wb_coherent_kern_range
        .long   v4wb_coherent_user_range
        .long   v4wb_flush_kern_dcache_area
+       .long   v4wb_dma_map_area
+       .long   v4wb_dma_unmap_area
        .long   v4wb_dma_inv_range
        .long   v4wb_dma_clean_range
        .long   v4wb_dma_flush_range
 
        .globl  v4wt_dma_flush_range
        .equ    v4wt_dma_flush_range, v4wt_dma_inv_range
 
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(v4wt_dma_unmap_area)
+       add     r1, r1, r0
+       teq     r2, #DMA_TO_DEVICE
+       bne     v4wt_dma_inv_range
+       /* FALLTHROUGH */
+
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(v4wt_dma_map_area)
+       mov     pc, lr
+ENDPROC(v4wt_dma_unmap_area)
+ENDPROC(v4wt_dma_map_area)
+
        __INITDATA
 
        .type   v4wt_cache_fns, #object
        .long   v4wt_coherent_kern_range
        .long   v4wt_coherent_user_range
        .long   v4wt_flush_kern_dcache_area
+       .long   v4wt_dma_map_area
+       .long   v4wt_dma_unmap_area
        .long   v4wt_dma_inv_range
        .long   v4wt_dma_clean_range
        .long   v4wt_dma_flush_range
 
        mcr     p15, 0, r0, c7, c10, 4          @ drain write buffer
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(v6_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     v6_dma_clean_range
+       bcs     v6_dma_inv_range
+       b       v6_dma_flush_range
+ENDPROC(v6_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(v6_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(v6_dma_unmap_area)
+
        __INITDATA
 
        .type   v6_cache_fns, #object
        .long   v6_coherent_kern_range
        .long   v6_coherent_user_range
        .long   v6_flush_kern_dcache_area
+       .long   v6_dma_map_area
+       .long   v6_dma_unmap_area
        .long   v6_dma_inv_range
        .long   v6_dma_clean_range
        .long   v6_dma_flush_range
 
        mov     pc, lr
 ENDPROC(v7_dma_flush_range)
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(v7_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     v7_dma_clean_range
+       bcs     v7_dma_inv_range
+       b       v7_dma_flush_range
+ENDPROC(v7_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(v7_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(v7_dma_unmap_area)
+
        __INITDATA
 
        .type   v7_cache_fns, #object
        .long   v7_coherent_kern_range
        .long   v7_coherent_user_range
        .long   v7_flush_kern_dcache_area
+       .long   v7_dma_map_area
+       .long   v7_dma_unmap_area
        .long   v7_dma_inv_range
        .long   v7_dma_clean_range
        .long   v7_dma_flush_range
 
  */
 static void dma_cache_maint(const void *start, size_t size, int direction)
 {
-       void (*inner_op)(const void *, const void *);
        void (*outer_op)(unsigned long, unsigned long);
 
-       BUG_ON(!virt_addr_valid(start) || !virt_addr_valid(start + size - 1));
-
        switch (direction) {
        case DMA_FROM_DEVICE:           /* invalidate only */
-               inner_op = dmac_inv_range;
                outer_op = outer_inv_range;
                break;
        case DMA_TO_DEVICE:             /* writeback only */
-               inner_op = dmac_clean_range;
                outer_op = outer_clean_range;
                break;
        case DMA_BIDIRECTIONAL:         /* writeback and invalidate */
-               inner_op = dmac_flush_range;
                outer_op = outer_flush_range;
                break;
        default:
                BUG();
        }
 
-       inner_op(start, start + size);
        outer_op(__pa(start), __pa(start) + size);
 }
 
 void ___dma_single_cpu_to_dev(const void *kaddr, size_t size,
        enum dma_data_direction dir)
 {
+       BUG_ON(!virt_addr_valid(kaddr) || !virt_addr_valid(kaddr + size - 1));
+
+       dmac_map_area(kaddr, size, dir);
        dma_cache_maint(kaddr, size, dir);
 }
 EXPORT_SYMBOL(___dma_single_cpu_to_dev);
 void ___dma_single_dev_to_cpu(const void *kaddr, size_t size,
        enum dma_data_direction dir)
 {
-       /* nothing to do */
+       BUG_ON(!virt_addr_valid(kaddr) || !virt_addr_valid(kaddr + size - 1));
+
+       dmac_unmap_area(kaddr, size, dir);
 }
 EXPORT_SYMBOL(___dma_single_dev_to_cpu);
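
With the inner-cache work moved into dmac_map_area()/dmac_unmap_area(),
dma_cache_maint() above is left handling only the outer (L2) cache, and the
two hooks pair up around a streaming DMA transfer. A hedged sketch of a
driver-side receive path (example_rx is hypothetical; the wrapper-to-hook
wiring lives in the arch dma-mapping header):

	#include <linux/dma-mapping.h>

	static void example_rx(struct device *dev, void *buf, size_t len)
	{
		dma_addr_t handle;

		/* CPU -> device: ___dma_single_cpu_to_dev() runs
		 * dmac_map_area() on the inner cache, then the outer op */
		handle = dma_map_single(dev, buf, len, DMA_FROM_DEVICE);

		/* ... program the device and wait for completion ... */

		/* device -> CPU: ___dma_single_dev_to_cpu() runs
		 * dmac_unmap_area() so stale cache lines cannot mask the
		 * freshly DMA'd data */
		dma_unmap_single(dev, handle, len, DMA_FROM_DEVICE);
	}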
 
 static void dma_cache_maint_page(struct page *page, unsigned long offset,
-       size_t size, void (*op)(const void *, const void *))
+       size_t size, enum dma_data_direction dir,
+       void (*op)(const void *, size_t, int))
 {
        /*
         * A single sg entry may refer to multiple physically contiguous
                        vaddr = kmap_high_get(page);
                        if (vaddr) {
                                vaddr += offset;
-                               op(vaddr, vaddr + len);
+                               op(vaddr, len, dir);
                                kunmap_high(page);
                        }
                } else {
                        vaddr = page_address(page) + offset;
-                       op(vaddr, vaddr + len);
+                       op(vaddr, len, dir);
                }
                offset = 0;
                page++;
        size_t size, enum dma_data_direction dir)
 {
        unsigned long paddr;
-       void (*inner_op)(const void *, const void *);
        void (*outer_op)(unsigned long, unsigned long);
 
        switch (direction) {
        case DMA_FROM_DEVICE:           /* invalidate only */
-               inner_op = dmac_inv_range;
                outer_op = outer_inv_range;
                break;
        case DMA_TO_DEVICE:             /* writeback only */
-               inner_op = dmac_clean_range;
                outer_op = outer_clean_range;
                break;
        case DMA_BIDIRECTIONAL:         /* writeback and invalidate */
-               inner_op = dmac_flush_range;
                outer_op = outer_flush_range;
                break;
        default:
                BUG();
        }
 
-       dma_cache_maint_page(page, off, size, inner_op);
+       dma_cache_maint_page(page, off, size, dir, dmac_map_area);
 
        paddr = page_to_phys(page) + off;
        outer_op(paddr, paddr + size);
 void ___dma_page_dev_to_cpu(struct page *page, unsigned long off,
        size_t size, enum dma_data_direction dir)
 {
-       /* nothing to do */
+       dma_cache_maint_page(page, off, size, dir, dmac_unmap_area);
 }
 EXPORT_SYMBOL(___dma_page_dev_to_cpu);
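
The page-based hooks back dma_map_page()/dma_unmap_page() and the
scatterlist paths; dma_cache_maint_page() walks the region page by page so
highmem pages are only touched through a valid kernel mapping
(kmap_high_get). A short usage sketch (example_rx_page is hypothetical):

	static dma_addr_t example_rx_page(struct device *dev, struct page *page,
					  unsigned long offset, size_t len)
	{
		/* -> ___dma_page_cpu_to_dev(): dmac_map_area() per page,
		 * then the outer-cache operation on the physical range */
		return dma_map_page(dev, page, offset, len, DMA_FROM_DEVICE);
	}

The matching dma_unmap_page() lands in ___dma_page_dev_to_cpu(), which now
does real work via dmac_unmap_area() rather than nothing: on cores that can
speculatively load lines back into the cache while DMA is in flight, the
invalidate must happen after the transfer, not only before it.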
 
 
        mcr     p15, 0, ip, c7, c10, 4          @ drain WB
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm1020_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     arm1020_dma_clean_range
+       bcs     arm1020_dma_inv_range
+       b       arm1020_dma_flush_range
+ENDPROC(arm1020_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm1020_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(arm1020_dma_unmap_area)
+
 ENTRY(arm1020_cache_fns)
        .long   arm1020_flush_kern_cache_all
        .long   arm1020_flush_user_cache_all
        .long   arm1020_coherent_kern_range
        .long   arm1020_coherent_user_range
        .long   arm1020_flush_kern_dcache_area
+       .long   arm1020_dma_map_area
+       .long   arm1020_dma_unmap_area
        .long   arm1020_dma_inv_range
        .long   arm1020_dma_clean_range
        .long   arm1020_dma_flush_range
 
        mcr     p15, 0, ip, c7, c10, 4          @ drain WB
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm1020e_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     arm1020e_dma_clean_range
+       bcs     arm1020e_dma_inv_range
+       b       arm1020e_dma_flush_range
+ENDPROC(arm1020e_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm1020e_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(arm1020e_dma_unmap_area)
+
 ENTRY(arm1020e_cache_fns)
        .long   arm1020e_flush_kern_cache_all
        .long   arm1020e_flush_user_cache_all
        .long   arm1020e_coherent_kern_range
        .long   arm1020e_coherent_user_range
        .long   arm1020e_flush_kern_dcache_area
+       .long   arm1020e_dma_map_area
+       .long   arm1020e_dma_unmap_area
        .long   arm1020e_dma_inv_range
        .long   arm1020e_dma_clean_range
        .long   arm1020e_dma_flush_range
 
        mcr     p15, 0, ip, c7, c10, 4          @ drain WB
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm1022_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     arm1022_dma_clean_range
+       bcs     arm1022_dma_inv_range
+       b       arm1022_dma_flush_range
+ENDPROC(arm1022_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm1022_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(arm1022_dma_unmap_area)
+
 ENTRY(arm1022_cache_fns)
        .long   arm1022_flush_kern_cache_all
        .long   arm1022_flush_user_cache_all
        .long   arm1022_coherent_kern_range
        .long   arm1022_coherent_user_range
        .long   arm1022_flush_kern_dcache_area
+       .long   arm1022_dma_map_area
+       .long   arm1022_dma_unmap_area
        .long   arm1022_dma_inv_range
        .long   arm1022_dma_clean_range
        .long   arm1022_dma_flush_range
 
        mcr     p15, 0, ip, c7, c10, 4          @ drain WB
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm1026_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     arm1026_dma_clean_range
+       bcs     arm1026_dma_inv_range
+       b       arm1026_dma_flush_range
+ENDPROC(arm1026_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm1026_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(arm1026_dma_unmap_area)
+
 ENTRY(arm1026_cache_fns)
        .long   arm1026_flush_kern_cache_all
        .long   arm1026_flush_user_cache_all
        .long   arm1026_coherent_kern_range
        .long   arm1026_coherent_user_range
        .long   arm1026_flush_kern_dcache_area
+       .long   arm1026_dma_map_area
+       .long   arm1026_dma_unmap_area
        .long   arm1026_dma_inv_range
        .long   arm1026_dma_clean_range
        .long   arm1026_dma_flush_range
 
        mcr     p15, 0, r0, c7, c10, 4          @ drain WB
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm920_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     arm920_dma_clean_range
+       bcs     arm920_dma_inv_range
+       b       arm920_dma_flush_range
+ENDPROC(arm920_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm920_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(arm920_dma_unmap_area)
+
 ENTRY(arm920_cache_fns)
        .long   arm920_flush_kern_cache_all
        .long   arm920_flush_user_cache_all
        .long   arm920_coherent_kern_range
        .long   arm920_coherent_user_range
        .long   arm920_flush_kern_dcache_area
+       .long   arm920_dma_map_area
+       .long   arm920_dma_unmap_area
        .long   arm920_dma_inv_range
        .long   arm920_dma_clean_range
        .long   arm920_dma_flush_range
 
        mcr     p15, 0, r0, c7, c10, 4          @ drain WB
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm922_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     arm922_dma_clean_range
+       bcs     arm922_dma_inv_range
+       b       arm922_dma_flush_range
+ENDPROC(arm922_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm922_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(arm922_dma_unmap_area)
+
 ENTRY(arm922_cache_fns)
        .long   arm922_flush_kern_cache_all
        .long   arm922_flush_user_cache_all
        .long   arm922_coherent_kern_range
        .long   arm922_coherent_user_range
        .long   arm922_flush_kern_dcache_area
+       .long   arm922_dma_map_area
+       .long   arm922_dma_unmap_area
        .long   arm922_dma_inv_range
        .long   arm922_dma_clean_range
        .long   arm922_dma_flush_range
 
        mcr     p15, 0, r0, c7, c10, 4          @ drain WB
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm925_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     arm925_dma_clean_range
+       bcs     arm925_dma_inv_range
+       b       arm925_dma_flush_range
+ENDPROC(arm925_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm925_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(arm925_dma_unmap_area)
+
 ENTRY(arm925_cache_fns)
        .long   arm925_flush_kern_cache_all
        .long   arm925_flush_user_cache_all
        .long   arm925_coherent_kern_range
        .long   arm925_coherent_user_range
        .long   arm925_flush_kern_dcache_area
+       .long   arm925_dma_map_area
+       .long   arm925_dma_unmap_area
        .long   arm925_dma_inv_range
        .long   arm925_dma_clean_range
        .long   arm925_dma_flush_range
 
        mcr     p15, 0, r0, c7, c10, 4          @ drain WB
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm926_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     arm926_dma_clean_range
+       bcs     arm926_dma_inv_range
+       b       arm926_dma_flush_range
+ENDPROC(arm926_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm926_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(arm926_dma_unmap_area)
+
 ENTRY(arm926_cache_fns)
        .long   arm926_flush_kern_cache_all
        .long   arm926_flush_user_cache_all
        .long   arm926_coherent_kern_range
        .long   arm926_coherent_user_range
        .long   arm926_flush_kern_dcache_area
+       .long   arm926_dma_map_area
+       .long   arm926_dma_unmap_area
        .long   arm926_dma_inv_range
        .long   arm926_dma_clean_range
        .long   arm926_dma_flush_range
 
        mcr     p15, 0, ip, c7, c10, 4          @ drain WB
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm940_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     arm940_dma_clean_range
+       bcs     arm940_dma_inv_range
+       b       arm940_dma_flush_range
+ENDPROC(arm940_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm940_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(arm940_dma_unmap_area)
+
 ENTRY(arm940_cache_fns)
        .long   arm940_flush_kern_cache_all
        .long   arm940_flush_user_cache_all
        .long   arm940_coherent_kern_range
        .long   arm940_coherent_user_range
        .long   arm940_flush_kern_dcache_area
+       .long   arm940_dma_map_area
+       .long   arm940_dma_unmap_area
        .long   arm940_dma_inv_range
        .long   arm940_dma_clean_range
        .long   arm940_dma_flush_range
 
        mcr     p15, 0, r0, c7, c10, 4          @ drain WB
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm946_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     arm946_dma_clean_range
+       bcs     arm946_dma_inv_range
+       b       arm946_dma_flush_range
+ENDPROC(arm946_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(arm946_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(arm946_dma_unmap_area)
+
 ENTRY(arm946_cache_fns)
        .long   arm946_flush_kern_cache_all
        .long   arm946_flush_user_cache_all
        .long   arm946_coherent_kern_range
        .long   arm946_coherent_user_range
        .long   arm946_flush_kern_dcache_area
+       .long   arm946_dma_map_area
+       .long   arm946_dma_unmap_area
        .long   arm946_dma_inv_range
        .long   arm946_dma_clean_range
        .long   arm946_dma_flush_range
 
        mcr     p15, 0, r0, c7, c10, 4          @ drain WB
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(feroceon_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     feroceon_dma_clean_range
+       bcs     feroceon_dma_inv_range
+       b       feroceon_dma_flush_range
+ENDPROC(feroceon_dma_map_area)
+
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(feroceon_range_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     feroceon_range_dma_clean_range
+       bcs     feroceon_range_dma_inv_range
+       b       feroceon_range_dma_flush_range
+ENDPROC(feroceon_range_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(feroceon_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(feroceon_dma_unmap_area)
+
 ENTRY(feroceon_cache_fns)
        .long   feroceon_flush_kern_cache_all
        .long   feroceon_flush_user_cache_all
        .long   feroceon_coherent_kern_range
        .long   feroceon_coherent_user_range
        .long   feroceon_flush_kern_dcache_area
+       .long   feroceon_dma_map_area
+       .long   feroceon_dma_unmap_area
        .long   feroceon_dma_inv_range
        .long   feroceon_dma_clean_range
        .long   feroceon_dma_flush_range
        .long   feroceon_coherent_kern_range
        .long   feroceon_coherent_user_range
        .long   feroceon_range_flush_kern_dcache_area
+       .long   feroceon_range_dma_map_area
+       .long   feroceon_dma_unmap_area
        .long   feroceon_range_dma_inv_range
        .long   feroceon_range_dma_clean_range
        .long   feroceon_range_dma_flush_range
 
        mcr     p15, 0, r0, c7, c10, 4          @ drain WB
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(mohawk_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     mohawk_dma_clean_range
+       bcs     mohawk_dma_inv_range
+       b       mohawk_dma_flush_range
+ENDPROC(mohawk_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(mohawk_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(mohawk_dma_unmap_area)
+
 ENTRY(mohawk_cache_fns)
        .long   mohawk_flush_kern_cache_all
        .long   mohawk_flush_user_cache_all
        .long   mohawk_coherent_kern_range
        .long   mohawk_coherent_user_range
        .long   mohawk_flush_kern_dcache_area
+       .long   mohawk_dma_map_area
+       .long   mohawk_dma_unmap_area
        .long   mohawk_dma_inv_range
        .long   mohawk_dma_clean_range
        .long   mohawk_dma_flush_range
 
        mcr     p15, 0, r0, c7, c10, 4          @ data write barrier
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(xsc3_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     xsc3_dma_clean_range
+       bcs     xsc3_dma_inv_range
+       b       xsc3_dma_flush_range
+ENDPROC(xsc3_dma_map_area)
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(xsc3_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(xsc3_dma_unmap_area)
+
 ENTRY(xsc3_cache_fns)
        .long   xsc3_flush_kern_cache_all
        .long   xsc3_flush_user_cache_all
        .long   xsc3_coherent_kern_range
        .long   xsc3_coherent_user_range
        .long   xsc3_flush_kern_dcache_area
+       .long   xsc3_dma_map_area
+       .long   xsc3_dma_unmap_area
        .long   xsc3_dma_inv_range
        .long   xsc3_dma_clean_range
        .long   xsc3_dma_flush_range
 
        mcr     p15, 0, r0, c7, c10, 4          @ Drain Write (& Fill) Buffer
        mov     pc, lr
 
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(xscale_dma_map_area)
+       add     r1, r1, r0
+       cmp     r2, #DMA_TO_DEVICE
+       beq     xscale_dma_clean_range
+       bcs     xscale_dma_inv_range
+       b       xscale_dma_flush_range
+ENDPROC(xscale_dma_map_area)
+
+/*
+ *     dma_map_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(xscale_dma_a0_map_area)
+       add     r1, r1, r0
+       teq     r2, #DMA_TO_DEVICE
+       beq     xscale_dma_clean_range
+       b       xscale_dma_flush_range
+ENDPROC(xscale_dma_a0_map_area)
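
The a0 variant exists because a pure D-cache invalidate is not safe on the
early XScale steppings this table serves, so everything that would
invalidate (DMA_FROM_DEVICE and DMA_BIDIRECTIONAL) is cleaned and
invalidated instead; the a0 function-pointer table below likewise
substitutes xscale_dma_flush_range into the invalidate slot. In C terms
(hypothetical name; the erratum rationale is inferred from that
substitution):

	static void example_xscale_dma_a0_map_area(const void *start,
						   size_t size, int dir)
	{
		const void *end = (const char *)start + size;

		if (dir == DMA_TO_DEVICE)
			xscale_dma_clean_range(start, end);
		else	/* FROM_DEVICE and BIDIRECTIONAL */
			xscale_dma_flush_range(start, end);
	}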
+
+/*
+ *     dma_unmap_area(start, size, dir)
+ *     - start - kernel virtual start address
+ *     - size  - size of region
+ *     - dir   - DMA direction
+ */
+ENTRY(xscale_dma_unmap_area)
+       mov     pc, lr
+ENDPROC(xscale_dma_unmap_area)
+
 ENTRY(xscale_cache_fns)
        .long   xscale_flush_kern_cache_all
        .long   xscale_flush_user_cache_all
        .long   xscale_coherent_kern_range
        .long   xscale_coherent_user_range
        .long   xscale_flush_kern_dcache_area
+       .long   xscale_dma_map_area
+       .long   xscale_dma_unmap_area
        .long   xscale_dma_inv_range
        .long   xscale_dma_clean_range
        .long   xscale_dma_flush_range
        .long   xscale_coherent_kern_range
        .long   xscale_coherent_user_range
        .long   xscale_flush_kern_dcache_area
+       .long   xscale_dma_a0_map_area
+       .long   xscale_dma_unmap_area
        .long   xscale_dma_flush_range
        .long   xscale_dma_clean_range
        .long   xscale_dma_flush_range