        ivsize = crypto_skcipher_ivsize(tfm);
        if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
                rctx->ivlen = ivsize;
-               rctx->bounce_iv = kzalloc(ivsize, GFP_KERNEL | GFP_DMA);
-               if (!rctx->bounce_iv) {
-                       err = -ENOMEM;
-                       goto theend_key;
-               }
                if (rctx->op_dir & CE_DECRYPTION) {
-                       rctx->backup_iv = kzalloc(ivsize, GFP_KERNEL);
-                       if (!rctx->backup_iv) {
-                               err = -ENOMEM;
-                               goto theend_key;
-                       }
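+                       /*
+                        * For decryption, save the last ciphertext block now:
+                        * it is the IV for the next request and an in-place
+                        * operation would overwrite it in the source buffer.
+                        */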
                        offset = areq->cryptlen - ivsize;
-                       scatterwalk_map_and_copy(rctx->backup_iv, areq->src,
+                       scatterwalk_map_and_copy(chan->backup_iv, areq->src,
                                                 offset, ivsize, 0);
                }
-               memcpy(rctx->bounce_iv, areq->iv, ivsize);
-               rctx->addr_iv = dma_map_single(ce->dev, rctx->bounce_iv, rctx->ivlen,
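+               /*
+                * Stage the IV in the channel's pre-allocated, DMA-safe
+                * bounce buffer before mapping it for the device.
+                */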
+               memcpy(chan->bounce_iv, areq->iv, ivsize);
+               rctx->addr_iv = dma_map_single(ce->dev, chan->bounce_iv, rctx->ivlen,
                                               DMA_TO_DEVICE);
                if (dma_mapping_error(ce->dev, rctx->addr_iv)) {
                        dev_err(ce->dev, "Cannot DMA MAP IV\n");
                        dma_unmap_single(ce->dev, rctx->addr_iv, rctx->ivlen, DMA_TO_DEVICE);
                offset = areq->cryptlen - ivsize;
                if (rctx->op_dir & CE_DECRYPTION) {
-                       memcpy(areq->iv, rctx->backup_iv, ivsize);
-                       kfree_sensitive(rctx->backup_iv);
+                       memcpy(areq->iv, chan->backup_iv, ivsize);
+                       memzero_explicit(chan->backup_iv, ivsize);
                } else {
                        scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
                                                 ivsize, 0);
                }
-               kfree(rctx->bounce_iv);
+               memzero_explicit(chan->bounce_iv, ivsize);
        }
 
-theend_key:
        dma_unmap_single(ce->dev, rctx->addr_key, op->keylen, DMA_TO_DEVICE);
 
 theend:
                        dma_unmap_single(ce->dev, rctx->addr_iv, rctx->ivlen, DMA_TO_DEVICE);
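+               /*
+                * The per-channel IV buffers are reused by later requests,
+                * so clear them instead of freeing them.
+                */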
                offset = areq->cryptlen - ivsize;
                if (rctx->op_dir & CE_DECRYPTION) {
-                       memcpy(areq->iv, rctx->backup_iv, ivsize);
-                       kfree_sensitive(rctx->backup_iv);
+                       memcpy(areq->iv, chan->backup_iv, ivsize);
+                       memzero_explicit(chan->backup_iv, ivsize);
                } else {
                        scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
                                                 ivsize, 0);
                }
-               kfree(rctx->bounce_iv);
+               memzero_explicit(chan->bounce_iv, ivsize);
        }
 
        dma_unmap_single(ce->dev, rctx->addr_key, op->keylen, DMA_TO_DEVICE);
 
                        .cra_priority = 400,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
-                               CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
+                               CRYPTO_ALG_ASYNC |
                                CRYPTO_ALG_NEED_FALLBACK,
                        .cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
                        .cra_module = THIS_MODULE,
                        .cra_priority = 400,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
-                               CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
+                               CRYPTO_ALG_ASYNC |
                                CRYPTO_ALG_NEED_FALLBACK,
                        .cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
                        .cra_module = THIS_MODULE,
                        .cra_priority = 400,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
-                               CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
+                               CRYPTO_ALG_ASYNC |
                                CRYPTO_ALG_NEED_FALLBACK,
                        .cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
                        .cra_module = THIS_MODULE,
                        .cra_priority = 400,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
-                               CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
+                               CRYPTO_ALG_ASYNC |
                                CRYPTO_ALG_NEED_FALLBACK,
                        .cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
                        .cra_module = THIS_MODULE,
                        err = -ENOMEM;
                        goto error_engine;
                }
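+               /*
+                * Pre-allocate the IV bounce/backup buffers for this channel
+                * so the request path does not allocate memory, which allows
+                * dropping CRYPTO_ALG_ALLOCATES_MEMORY from the cipher algos.
+                * AES_BLOCK_SIZE covers the largest IV of the supported modes.
+                */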
+               ce->chanlist[i].bounce_iv = devm_kmalloc(ce->dev, AES_BLOCK_SIZE,
+                                                        GFP_KERNEL | GFP_DMA);
+               if (!ce->chanlist[i].bounce_iv) {
+                       err = -ENOMEM;
+                       goto error_engine;
+               }
+               ce->chanlist[i].backup_iv = devm_kmalloc(ce->dev, AES_BLOCK_SIZE,
+                                                        GFP_KERNEL);
+               if (!ce->chanlist[i].backup_iv) {
+                       err = -ENOMEM;
+                       goto error_engine;
+               }
        }
        return 0;
 error_engine:
 
  * @status:    set to 1 by interrupt if task is done
  * @t_phy:     Physical address of task
  * @tl:                pointer to the current ce_task for this flow
+ * @backup_iv:         buffer which contains the next IV to store
+ * @bounce_iv:         buffer which contains the IV
  * @stat_req:  number of request done by this flow
  */
 struct sun8i_ce_flow {
        dma_addr_t t_phy;
        int timeout;
        struct ce_task *tl;
+       void *backup_iv;
+       void *bounce_iv;
 #ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
        unsigned long stat_req;
 #endif
  * struct sun8i_cipher_req_ctx - context for a skcipher request
  * @op_dir:            direction (encrypt vs decrypt) for this request
  * @flow:              the flow to use for this request
- * @backup_iv:         buffer which contain the next IV to store
- * @bounce_iv:         buffer which contain the IV
  * @ivlen:             size of bounce_iv
  * @nr_sgs:            The number of source SG (as given by dma_map_sg())
  * @nr_sgd:            The number of destination SG (as given by dma_map_sg())
 struct sun8i_cipher_req_ctx {
        u32 op_dir;
        int flow;
-       void *backup_iv;
-       void *bounce_iv;
        unsigned int ivlen;
        int nr_sgs;
        int nr_sgd;