struct dma_map_ops *dma_ops;
 EXPORT_SYMBOL(dma_ops);
 
+/*
+ * Pick the page protection to use for a DMA buffer mapping.
+ *
+ * A DMA_ATTR_WRITE_COMBINE request takes precedence and yields a
+ * write-combining protection regardless of device coherency.
+ * Otherwise, non-coherent devices get the pgprot_dmacoherent()
+ * variant of @prot, and coherent devices keep @prot unchanged.
+ */
+static pgprot_t __get_dma_pgprot(struct dma_attrs *attrs, pgprot_t prot,
+                                bool coherent)
+{
+       if (dma_get_attr(DMA_ATTR_WRITE_COMBINE, attrs))
+               return pgprot_writecombine(prot);
+       else if (!coherent)
+               return pgprot_dmacoherent(prot);
+       return prot;
+}
+
 static void *__dma_alloc_coherent(struct device *dev, size_t size,
                                  dma_addr_t *dma_handle, gfp_t flags,
                                  struct dma_attrs *attrs)
        for (i = 0; i < (size >> PAGE_SHIFT); i++)
                map[i] = page + i;
        coherent_ptr = vmap(map, size >> PAGE_SHIFT, VM_MAP,
-                           pgprot_dmacoherent(pgprot_default));
+                           __get_dma_pgprot(attrs, pgprot_default, false));
        kfree(map);
        if (!coherent_ptr)
                goto no_map;
                void *cpu_addr, dma_addr_t dma_addr, size_t size,
                struct dma_attrs *attrs)
 {
-       vma->vm_page_prot = pgprot_dmacoherent(vma->vm_page_prot);
+       vma->vm_page_prot = __get_dma_pgprot(attrs, vma->vm_page_prot, false);
        return __dma_common_mmap(dev, vma, cpu_addr, dma_addr, size);
 }