--- a/include/linux/dma-map-ops.h
+++ b/include/linux/dma-map-ops.h
@@ ... @@
int dma_release_from_dev_coherent(struct device *dev, int order, void *vaddr);
int dma_mmap_from_dev_coherent(struct device *dev, struct vm_area_struct *vma,
		void *cpu_addr, size_t size, int *ret);
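+/**
+ * use_dev_coherent_memory - check whether @dev has per-device coherent memory
+ * @dev: device to check
+ *
+ * Returns true if coherent memory was declared for @dev via
+ * dma_declare_coherent_memory(), i.e. dev->dma_mem is set.
+ */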
+static inline bool use_dev_coherent_memory(struct device *dev)
+{
+	return dev->dma_mem;
+}
#else
static inline int dma_declare_coherent_memory(struct device *dev,
		phys_addr_t phys_addr, dma_addr_t device_addr, size_t size)
{
	return -ENOSYS;
}
#define dma_release_from_dev_coherent(dev, order, vaddr) (0)
#define dma_mmap_from_dev_coherent(dev, vma, vaddr, order, ret) (0)
static inline void dma_release_coherent_memory(struct device *dev) { }
+#define use_dev_coherent_memory(dev) (0)
#endif /* CONFIG_DMA_DECLARE_COHERENT */
#ifdef CONFIG_DMA_GLOBAL_POOL
--- a/mm/dmapool.c
+++ b/mm/dmapool.c
@@ ... @@
#include <linux/device.h>
+#include <linux/dma-map-ops.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/kernel.h>
#include <linux/list.h>
@@ ... @@
spin_unlock_irqrestore(&pool->lock, flags);
- if (want_init_on_alloc(mem_flags))
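+ /*
+ * dma-direct (no dma_map_ops set) and per-device coherent memory
+ * already return zeroed allocations, so zero explicitly only when
+ * the block came from an ops-based allocator.
+ */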
+ if (want_init_on_alloc(mem_flags) &&
+ !use_dev_coherent_memory(pool->dev) &&
+ get_dma_ops(pool->dev))
memset(retval, 0, pool->size);
return retval;