int
        dma_declare_coherent_memory(struct device *dev, phys_addr_t phys_addr,
-                                   dma_addr_t device_addr, size_t size, int
-                                   flags)
+                                   dma_addr_t device_addr, size_t size);
 
 Declare a region of memory to be handed out by dma_alloc_coherent() when
 it's asked for coherent memory for this device.
 
 size is the size of the area (must be a multiple of PAGE_SIZE).
 
-flags can be ORed together and are:
-
-- DMA_MEMORY_EXCLUSIVE - only allocate memory from the declared regions.
-  Do not allow dma_alloc_coherent() to fall back to system memory when
-  it's out of memory in the declared region.
-
 As a simplification for the platforms, only *one* such region of
 memory may be declared per device.
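
 As a hedged illustration only (the foo_cam_device, foo_cam_base and
 FOO_CAM_BUF_SIZE names below are hypothetical, not taken from this patch),
 a caller now uses the four-argument form and checks the return value,
 much like the call sites updated further down:

        /* Sketch of a board-file caller after this change. */
        int ret;

        ret = dma_declare_coherent_memory(&foo_cam_device.dev,
                                          foo_cam_base,        /* phys_addr   */
                                          foo_cam_base,        /* device_addr */
                                          FOO_CAM_BUF_SIZE);   /* size        */
        if (ret)
                pr_err("foo: failed to declare coherent memory\n");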
 
 
                return;
 
        dma_declare_coherent_memory(&pdev->dev, mx2_camera_base,
-                                   mx2_camera_base, MX2_CAMERA_BUF_SIZE,
-                                   DMA_MEMORY_EXCLUSIVE);
+                                   mx2_camera_base, MX2_CAMERA_BUF_SIZE);
 }
 
 static void __init visstrim_reserve(void)
        dma_declare_coherent_memory(&pdev->dev,
                                    mx2_camera_base + MX2_CAMERA_BUF_SIZE,
                                    mx2_camera_base + MX2_CAMERA_BUF_SIZE,
-                                   MX2_CAMERA_BUF_SIZE,
-                                   DMA_MEMORY_EXCLUSIVE);
+                                   MX2_CAMERA_BUF_SIZE);
 }
 
 /* DMA deinterlace */
        dma_declare_coherent_memory(&pdev->dev,
                                    mx2_camera_base + 2 * MX2_CAMERA_BUF_SIZE,
                                    mx2_camera_base + 2 * MX2_CAMERA_BUF_SIZE,
-                                   MX2_CAMERA_BUF_SIZE,
-                                   DMA_MEMORY_EXCLUSIVE);
+                                   MX2_CAMERA_BUF_SIZE);
 }
 
 /* Emma-PrP for format conversion */
         */
        ret = dma_declare_coherent_memory(&pdev->dev,
                                mx2_camera_base, mx2_camera_base,
-                               MX2_CAMERA_BUF_SIZE,
-                               DMA_MEMORY_EXCLUSIVE);
+                               MX2_CAMERA_BUF_SIZE);
        if (ret)
                pr_err("Failed to declare memory for emmaprp\n");
 }
 
 
        ret = dma_declare_coherent_memory(&pdev->dev,
                                          mx3_camera_base, mx3_camera_base,
-                                         MX3_CAMERA_BUF_SIZE,
-                                         DMA_MEMORY_EXCLUSIVE);
+                                         MX3_CAMERA_BUF_SIZE);
        if (ret)
                goto err;
 
 
        device_initialize(&ap325rxa_ceu_device.dev);
        arch_setup_pdev_archdata(&ap325rxa_ceu_device);
        dma_declare_coherent_memory(&ap325rxa_ceu_device.dev,
-                                   ceu_dma_membase, ceu_dma_membase,
-                                   ceu_dma_membase + CEU_BUFFER_MEMORY_SIZE - 1,
-                                   DMA_MEMORY_EXCLUSIVE);
+                       ceu_dma_membase, ceu_dma_membase,
+                       ceu_dma_membase + CEU_BUFFER_MEMORY_SIZE - 1);
 
        platform_device_add(&ap325rxa_ceu_device);
 
 
        dma_declare_coherent_memory(&ecovec_ceu_devices[0]->dev,
                                    ceu0_dma_membase, ceu0_dma_membase,
                                    ceu0_dma_membase +
-                                   CEU_BUFFER_MEMORY_SIZE - 1,
-                                   DMA_MEMORY_EXCLUSIVE);
+                                   CEU_BUFFER_MEMORY_SIZE - 1);
        platform_device_add(ecovec_ceu_devices[0]);
 
        device_initialize(&ecovec_ceu_devices[1]->dev);
        dma_declare_coherent_memory(&ecovec_ceu_devices[1]->dev,
                                    ceu1_dma_membase, ceu1_dma_membase,
                                    ceu1_dma_membase +
-                                   CEU_BUFFER_MEMORY_SIZE - 1,
-                                   DMA_MEMORY_EXCLUSIVE);
+                                   CEU_BUFFER_MEMORY_SIZE - 1);
        platform_device_add(ecovec_ceu_devices[1]);
 
        gpiod_add_lookup_table(&cn12_power_gpiod_table);
 
        device_initialize(&kfr2r09_ceu_device.dev);
        arch_setup_pdev_archdata(&kfr2r09_ceu_device);
        dma_declare_coherent_memory(&kfr2r09_ceu_device.dev,
-                                   ceu_dma_membase, ceu_dma_membase,
-                                   ceu_dma_membase + CEU_BUFFER_MEMORY_SIZE - 1,
-                                   DMA_MEMORY_EXCLUSIVE);
+                       ceu_dma_membase, ceu_dma_membase,
+                       ceu_dma_membase + CEU_BUFFER_MEMORY_SIZE - 1);
 
        platform_device_add(&kfr2r09_ceu_device);
 
 
        device_initialize(&migor_ceu_device.dev);
        arch_setup_pdev_archdata(&migor_ceu_device);
        dma_declare_coherent_memory(&migor_ceu_device.dev,
-                                   ceu_dma_membase, ceu_dma_membase,
-                                   ceu_dma_membase + CEU_BUFFER_MEMORY_SIZE - 1,
-                                   DMA_MEMORY_EXCLUSIVE);
+                       ceu_dma_membase, ceu_dma_membase,
+                       ceu_dma_membase + CEU_BUFFER_MEMORY_SIZE - 1);
 
        platform_device_add(&migor_ceu_device);
 
 
        dma_declare_coherent_memory(&ms7724se_ceu_devices[0]->dev,
                                    ceu0_dma_membase, ceu0_dma_membase,
                                    ceu0_dma_membase +
-                                   CEU_BUFFER_MEMORY_SIZE - 1,
-                                   DMA_MEMORY_EXCLUSIVE);
+                                   CEU_BUFFER_MEMORY_SIZE - 1);
        platform_device_add(ms7724se_ceu_devices[0]);
 
        device_initialize(&ms7724se_ceu_devices[1]->dev);
        dma_declare_coherent_memory(&ms7724se_ceu_devices[1]->dev,
                                    ceu1_dma_membase, ceu1_dma_membase,
                                    ceu1_dma_membase +
-                                   CEU_BUFFER_MEMORY_SIZE - 1,
-                                   DMA_MEMORY_EXCLUSIVE);
+                                   CEU_BUFFER_MEMORY_SIZE - 1);
        platform_device_add(ms7724se_ceu_devices[1]);
 
        return platform_add_devices(ms7724se_devices,
 
                BUG_ON(dma_declare_coherent_memory(&dev->dev,
                                                res.start,
                                                region.start,
-                                               resource_size(&res),
-                                               DMA_MEMORY_EXCLUSIVE));
+                                               resource_size(&res)));
                break;
        default:
                printk("PCI: Failed resource fixup\n");
 
        if (res) {
                err = dma_declare_coherent_memory(&pdev->dev, res->start,
                                                  res->start,
-                                                 resource_size(res),
-                                                 DMA_MEMORY_EXCLUSIVE);
+                                                 resource_size(res));
                if (err) {
                        dev_err(&pdev->dev, "Unable to declare CEU memory.\n");
                        return err;
 
 
        retval = dma_declare_coherent_memory(dev, mem->start,
                                         mem->start - mem->parent->start,
-                                        resource_size(mem),
-                                        DMA_MEMORY_EXCLUSIVE);
+                                        resource_size(mem));
        if (retval) {
                dev_err(dev, "cannot declare coherent memory\n");
                goto err1;
 
        }
 
        ret = dma_declare_coherent_memory(&dev->dev, sram->start, sram->start,
-                               resource_size(sram), DMA_MEMORY_EXCLUSIVE);
+                               resource_size(sram));
        if (ret)
                goto err_dma_declare;
 
 
        return 1;
 }
 
-/* flags for the coherent memory api */
-#define DMA_MEMORY_EXCLUSIVE           0x01
-
 #ifdef CONFIG_DMA_DECLARE_COHERENT
 int dma_declare_coherent_memory(struct device *dev, phys_addr_t phys_addr,
-                               dma_addr_t device_addr, size_t size, int flags);
+                               dma_addr_t device_addr, size_t size);
 void dma_release_declared_memory(struct device *dev);
 #else
 static inline int
 dma_declare_coherent_memory(struct device *dev, phys_addr_t phys_addr,
-                           dma_addr_t device_addr, size_t size, int flags)
+                           dma_addr_t device_addr, size_t size)
 {
        return -ENOSYS;
 }
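
 For completeness, a rough sketch (the bar_* driver, bar_mem_base and
 BAR_MEM_SIZE names are assumptions, not part of this patch) of how the
 declaration is typically paired with dma_release_declared_memory() over a
 driver's lifetime:

        /* Illustrative probe/remove pairing; names are made up. */
        static int bar_probe(struct platform_device *pdev)
        {
                int ret;

                ret = dma_declare_coherent_memory(&pdev->dev, bar_mem_base,
                                                  bar_mem_base, BAR_MEM_SIZE);
                if (ret)
                        return ret;
                /* dma_alloc_coherent() on this device now uses the region */
                return 0;
        }

        static int bar_remove(struct platform_device *pdev)
        {
                dma_release_declared_memory(&pdev->dev);
                return 0;
        }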
 
        dma_addr_t      device_base;
        unsigned long   pfn_base;
        int             size;
-       int             flags;
        unsigned long   *bitmap;
        spinlock_t      spinlock;
        bool            use_dev_dma_pfn_offset;
                return mem->device_base;
 }
 
-static int dma_init_coherent_memory(
-       phys_addr_t phys_addr, dma_addr_t device_addr, size_t size, int flags,
-       struct dma_coherent_mem **mem)
+static int dma_init_coherent_memory(phys_addr_t phys_addr,
+               dma_addr_t device_addr, size_t size,
+               struct dma_coherent_mem **mem)
 {
        struct dma_coherent_mem *dma_mem = NULL;
        void *mem_base = NULL;
        dma_mem->device_base = device_addr;
        dma_mem->pfn_base = PFN_DOWN(phys_addr);
        dma_mem->size = pages;
-       dma_mem->flags = flags;
        spin_lock_init(&dma_mem->spinlock);
 
        *mem = dma_mem;
 }
 
 int dma_declare_coherent_memory(struct device *dev, phys_addr_t phys_addr,
-                               dma_addr_t device_addr, size_t size, int flags)
+                               dma_addr_t device_addr, size_t size)
 {
        struct dma_coherent_mem *mem;
        int ret;
 
-       ret = dma_init_coherent_memory(phys_addr, device_addr, size, flags, &mem);
+       ret = dma_init_coherent_memory(phys_addr, device_addr, size, &mem);
        if (ret)
                return ret;
 
                return 0;
 
        *ret = __dma_alloc_from_coherent(mem, size, dma_handle);
-       if (*ret)
-               return 1;
-
-       /*
-        * In the case where the allocation can not be satisfied from the
-        * per-device area, try to fall back to generic memory if the
-        * constraints allow it.
-        */
-       return mem->flags & DMA_MEMORY_EXCLUSIVE;
+       return 1;
 }
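
 With the flag gone, every declared region is exclusive: when
 __dma_alloc_from_coherent() cannot satisfy the request, the function above
 still returns 1 with *ret set to NULL, so the caller fails the allocation
 instead of falling back to system memory. A hedged sketch of the caller
 side (details of dma_alloc_attrs() are omitted):

        /* Caller-side sketch; not the literal dma_alloc_attrs() body. */
        void *cpu_addr;

        if (dma_alloc_from_dev_coherent(dev, size, dma_handle, &cpu_addr))
                return cpu_addr;  /* may be NULL: no system-memory fallback */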
 
 void *dma_alloc_from_global_coherent(ssize_t size, dma_addr_t *dma_handle)
 
        if (!mem) {
                ret = dma_init_coherent_memory(rmem->base, rmem->base,
-                                              rmem->size,
-                                              DMA_MEMORY_EXCLUSIVE, &mem);
+                                              rmem->size, &mem);
                if (ret) {
                        pr_err("Reserved memory: failed to init DMA memory pool at %pa, size %ld MiB\n",
                                &rmem->base, (unsigned long)rmem->size / SZ_1M);