 }
 
        r = platform_get_resource(pdev, IORESOURCE_MEM, 0);
-       if (!r)
-               return -ENXIO;
+       if (!r) {
+               ret = -ENXIO;
+               goto release_channel;
+       }
 
        config.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
        config.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
@@ ... @@ static int marvell_nfc_init_dma(struct marvell_nfc *nfc)
        ret = dmaengine_slave_config(nfc->dma_chan, &config);
        if (ret < 0) {
                dev_err(nfc->dev, "Failed to configure DMA channel\n");
-               return ret;
+               goto release_channel;
        }
 
        /*
@@ ... @@ static int marvell_nfc_init_dma(struct marvell_nfc *nfc)
         * the provided buffer.
         */
        nfc->dma_buf = kmalloc(MAX_CHUNK_SIZE, GFP_KERNEL | GFP_DMA);
-       if (!nfc->dma_buf)
-               return -ENOMEM;
+       if (!nfc->dma_buf) {
+               ret = -ENOMEM;
+               goto release_channel;
+       }
 
        nfc->use_dma = true;
 
        return 0;
+
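+       /*
+        * Error path: hand back the DMA channel requested earlier in
+        * this function and clear nfc->dma_chan so no stale pointer is
+        * left behind.
+        */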
+release_channel:
+       dma_release_channel(nfc->dma_chan);
+       nfc->dma_chan = NULL;
+
+       return ret;
 }
 
 static void marvell_nfc_reset(struct marvell_nfc *nfc)
@@ ... @@ static int marvell_nfc_probe(struct platform_device *pdev)
 
        ret = marvell_nand_chips_init(dev, nfc);
        if (ret)
-               goto unprepare_reg_clk;
+               goto release_dma;
 
        return 0;
 
+release_dma:
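+       /*
+        * A DMA channel is only held when DMA setup fully succeeded and
+        * set nfc->use_dma, so only release it in that case.
+        */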
+       if (nfc->use_dma)
+               dma_release_channel(nfc->dma_chan);
 unprepare_reg_clk:
        clk_disable_unprepare(nfc->reg_clk);
 unprepare_core_clk: