unsigned int ivsize = crypto_skcipher_ivsize(tfm);
        struct sun4i_cipher_req_ctx *ctx = skcipher_request_ctx(areq);
        u32 mode = ctx->mode;
+       /* Copy of the last ciphertext block, saved across decryption. */
+       void *backup_iv = NULL;
        /* when activating SS, the default FIFO space is SS_RX_DEFAULT(32) */
        u32 rx_cnt = SS_RX_DEFAULT;
        u32 tx_cnt = 0;
                return -EINVAL;
        }
 
+       /*
+        * Before decrypting, save the final ivsize bytes of the source
+        * scatterlist; they are restored into areq->iv after the operation
+        * (presumably the chaining IV for the next request, i.e. the last
+        * ciphertext block -- confirm against the skcipher API contract).
+        * NOTE(review): if any error path exits between this allocation and
+        * the restore below, backup_iv leaks; those paths are not visible
+        * here -- verify they free it.
+        */
+       if (areq->iv && ivsize > 0 && mode & SS_DECRYPTION) {
+               backup_iv = kzalloc(ivsize, GFP_KERNEL);
+               if (!backup_iv)
+                       return -ENOMEM;
+               scatterwalk_map_and_copy(backup_iv, areq->src, areq->cryptlen - ivsize, ivsize, 0);
+       }
+
        spin_lock_irqsave(&ss->slock, flags);
 
        for (i = 0; i < op->keylen; i += 4)
        } while (oleft);
 
        if (areq->iv) {
+               if (mode & SS_DECRYPTION) {
+                       /*
+                        * Restore the saved block and scrub the copy.
+                        * NOTE(review): assumes backup_iv was allocated above,
+                        * i.e. ivsize > 0 whenever areq->iv is set -- confirm.
+                        */
+                       memcpy(areq->iv, backup_iv, ivsize);
+                       kfree_sensitive(backup_iv);
+               } else {
+                       /* For encryption, the next IV is the last block of dst. */
+                       scatterwalk_map_and_copy(areq->iv, areq->dst, areq->cryptlen - ivsize,
+                                                ivsize, 0);
                }
        }
 
        unsigned int ileft = areq->cryptlen;
        unsigned int oleft = areq->cryptlen;
        unsigned int todo;
+       /* Copy of the last ciphertext block, saved across decryption. */
+       void *backup_iv = NULL;
        struct sg_mapping_iter mi, mo;
        unsigned int oi, oo;    /* offset for in and out */
        unsigned int ob = 0;    /* offset in buf */
        if (need_fallback)
                return sun4i_ss_cipher_poll_fallback(areq);
 
+       /*
+        * Save the final ivsize bytes of the source scatterlist before
+        * decryption; they are restored into areq->iv after the operation
+        * (presumably the chaining IV for the next request -- confirm).
+        * NOTE(review): error paths between here and the restore must free
+        * backup_iv; they are not visible in this hunk -- verify.
+        */
+       if (areq->iv && ivsize > 0 && mode & SS_DECRYPTION) {
+               backup_iv = kzalloc(ivsize, GFP_KERNEL);
+               if (!backup_iv)
+                       return -ENOMEM;
+               scatterwalk_map_and_copy(backup_iv, areq->src, areq->cryptlen - ivsize, ivsize, 0);
+       }
+
        spin_lock_irqsave(&ss->slock, flags);
 
        for (i = 0; i < op->keylen; i += 4)
                }
        }
        if (areq->iv) {
+               if (mode & SS_DECRYPTION) {
+                       /*
+                        * Restore the saved block and scrub the copy.
+                        * NOTE(review): assumes ivsize > 0 whenever areq->iv
+                        * is set, so backup_iv is non-NULL here -- confirm.
+                        */
+                       memcpy(areq->iv, backup_iv, ivsize);
+                       kfree_sensitive(backup_iv);
+               } else {
+                       /* For encryption, the next IV is the last block of dst. */
+                       scatterwalk_map_and_copy(areq->iv, areq->dst, areq->cryptlen - ivsize,
+                                                ivsize, 0);
                }
        }