        op = IOAT_OP_XOR;
 
        dest_dma = dma_map_page(dev, dest, 0, PAGE_SIZE, DMA_FROM_DEVICE);
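+       /*
+        * Nothing is DMA-mapped yet at this point, so a failed destination
+        * mapping does not need the dma_unmap unwind below.
+        */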
+       if (dma_mapping_error(dev, dest_dma))
+               goto free_resources;
+
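+       /*
+        * Pre-mark every source slot as unmapped so the dma_unmap error
+        * path only releases pages that were actually mapped.
+        */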
        for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
+               dma_srcs[i] = DMA_ERROR_CODE;
+       for (i = 0; i < IOAT_NUM_SRC_TEST; i++) {
                dma_srcs[i] = dma_map_page(dev, xor_srcs[i], 0, PAGE_SIZE,
                                           DMA_TO_DEVICE);
+               if (dma_mapping_error(dev, dma_srcs[i]))
+                       goto dma_unmap;
+       }
        tx = dma->device_prep_dma_xor(dma_chan, dest_dma, dma_srcs,
                                      IOAT_NUM_SRC_TEST, PAGE_SIZE,
                                      DMA_PREP_INTERRUPT);
 
        if (!tx) {
                dev_err(dev, "Self-test xor prep failed\n");
                err = -ENODEV;
                goto dma_unmap;
        }
 
-       dma_unmap_page(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
        for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
                dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE, DMA_TO_DEVICE);
 
        dma_sync_single_for_cpu(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
        for (i = 0; i < PAGE_SIZE / sizeof(u32); i++) {
                u32 *ptr = page_address(dest);
                if (ptr[i] != cmp_word) {
                        dev_err(dev, "Self-test xor failed compare\n");
                        err = -ENODEV;
                        goto free_resources;
                }
        }
        dma_sync_single_for_device(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
 
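+       /*
+        * Unmap the destination only after the final sync; dest_dma must
+        * stay valid for the dma_sync_single_*() calls above.
+        */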
+       dma_unmap_page(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
+
        /* skip validate if the capability is not present */
        if (!dma_has_cap(DMA_XOR_VAL, dma_chan->device->cap_mask))
                goto free_resources;
        xor_val_result = 1;
 
        for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
+               dma_srcs[i] = DMA_ERROR_CODE;
+       for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++) {
                dma_srcs[i] = dma_map_page(dev, xor_val_srcs[i], 0, PAGE_SIZE,
                                           DMA_TO_DEVICE);
+               if (dma_mapping_error(dev, dma_srcs[i]))
+                       goto dma_unmap;
+       }
        tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs,
                                          IOAT_NUM_SRC_TEST + 1, PAGE_SIZE,
                                          &xor_val_result, DMA_PREP_INTERRUPT);
 
        xor_val_result = 0;
        for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
+               dma_srcs[i] = DMA_ERROR_CODE;
+       for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++) {
                dma_srcs[i] = dma_map_page(dev, xor_val_srcs[i], 0, PAGE_SIZE,
                                           DMA_TO_DEVICE);
+               if (dma_mapping_error(dev, dma_srcs[i]))
+                       goto dma_unmap;
+       }
        tx = dma->device_prep_dma_xor_val(dma_chan, dma_srcs,
                                          IOAT_NUM_SRC_TEST + 1, PAGE_SIZE,
                                          &xor_val_result, DMA_PREP_INTERRUPT);
        goto free_resources;
 dma_unmap:
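+       /*
+        * Skip any slot still holding DMA_ERROR_CODE: it was never
+        * successfully mapped.
+        */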
        if (op == IOAT_OP_XOR) {
-               dma_unmap_page(dev, dest_dma, PAGE_SIZE, DMA_FROM_DEVICE);
+               if (dest_dma != DMA_ERROR_CODE)
+                       dma_unmap_page(dev, dest_dma, PAGE_SIZE,
+                                      DMA_FROM_DEVICE);
                for (i = 0; i < IOAT_NUM_SRC_TEST; i++)
-                       dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE,
-                                      DMA_TO_DEVICE);
+                       if (dma_srcs[i] != DMA_ERROR_CODE)
+                               dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE,
+                                              DMA_TO_DEVICE);
        } else if (op == IOAT_OP_XOR_VAL) {
                for (i = 0; i < IOAT_NUM_SRC_TEST + 1; i++)
-                       dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE,
-                                      DMA_TO_DEVICE);
+                       if (dma_srcs[i] != DMA_ERROR_CODE)
+                               dma_unmap_page(dev, dma_srcs[i], PAGE_SIZE,
+                                              DMA_TO_DEVICE);
        }
 free_resources:
        dma->device_free_chan_resources(dma_chan);