        if (subtype == CRYPTO_ALG_SUB_TYPE_CTR)
                ctr_add_iv(iv, req->iv, DIV_ROUND_UP(reqctx->processed,
                                                       AES_BLOCK_SIZE));
-       else if (subtype == CRYPTO_ALG_SUB_TYPE_XTS)
-               ret = chcr_update_tweak(req, iv, 1);
+       else if (subtype == CRYPTO_ALG_SUB_TYPE_XTS) {
+               if (!reqctx->partial_req)
+                       memcpy(iv, reqctx->iv, AES_BLOCK_SIZE);
+               else
+                       ret = chcr_update_tweak(req, iv, 1);
+       }
        else if (subtype == CRYPTO_ALG_SUB_TYPE_CBC) {
                /*Already updated for Decrypt*/
                if (!reqctx->op)
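
Condensed for reference, the XTS branch above boils down to the sketch below
(the helper name xts_final_iv_sketch is made up for illustration; the reqctx
fields and chcr_update_tweak() are the driver's own): when the request was not
split (partial_req == 0), the IV kept in reqctx->iv is copied back unchanged,
and only a split request recomputes the tweak over the blocks already processed.

	/* Illustrative helper, not part of the driver. */
	static int xts_final_iv_sketch(struct skcipher_request *req, u8 *iv)
	{
		struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);

		if (!reqctx->partial_req) {
			/* Whole request went out in one pass: return the saved IV. */
			memcpy(iv, reqctx->iv, AES_BLOCK_SIZE);
			return 0;
		}
		/* Split request: advance the tweak past the processed blocks. */
		return chcr_update_tweak(req, iv, 1);
	}
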
@@ ... @@ static int process_cipher(struct skcipher_request *req,
        int bytes, err = -EINVAL;
 
        reqctx->processed = 0;
+       reqctx->partial_req = 0;
        if (!req->iv)
                goto error;
        if ((ablkctx->enckey_len == 0) || (ivsize > AES_BLOCK_SIZE) ||
@@ ... @@ static int process_cipher(struct skcipher_request *req,
        }
        reqctx->processed = bytes;
        reqctx->last_req_len = bytes;
+       reqctx->partial_req = !!(req->cryptlen - reqctx->processed);
 
        return 0;
 unmap:
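
The partial_req assignment above is the heart of the change: the !!() collapses
the remaining byte count to 0 or 1, so the flag is set exactly when fewer bytes
were queued to the hardware than req->cryptlen asked for and the remainder will
be resubmitted. A minimal sketch of that predicate (the helper name is
hypothetical):

	/* 0 = request fit in one pass, 1 = it will be completed over several. */
	static inline u8 is_partial_req(unsigned int cryptlen, unsigned int processed)
	{
		return !!(cryptlen - processed);
	}
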
@@ ... @@
 static int chcr_aes_encrypt(struct skcipher_request *req)
 {
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+       struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
        struct chcr_context *ctx;
        struct chcr_dev *dev = c_ctx(tfm)->dev;
        struct sk_buff *skb = NULL;
@@ ... @@ static int chcr_aes_encrypt(struct skcipher_request *req)
                CRYPTO_ALG_SUB_TYPE_CBC && req->base.flags ==
                        CRYPTO_TFM_REQ_MAY_SLEEP ) {
                        ctx=c_ctx(tfm);
+                       reqctx->partial_req = 1;
                        wait_for_completion(&ctx->cbc_aes_aio_done);
         }
        return isfull ? -EBUSY : -EINPROGRESS;
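
The -EBUSY/-EINPROGRESS returns follow the usual asynchronous crypto API
convention: the request has been queued either way (here -EBUSY only signals
that the send queue crossed its fill threshold, so backlog-aware callers should
throttle), and the real result arrives via the completion callback. A minimal
caller-side sketch using the stock wait helpers from <linux/crypto.h> (the
submit_and_wait() wrapper is illustrative, not part of the driver):

	#include <linux/crypto.h>
	#include <crypto/skcipher.h>

	/* Submit one encrypt request and block until the async result is in. */
	static int submit_and_wait(struct skcipher_request *req)
	{
		DECLARE_CRYPTO_WAIT(wait);

		skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
					      CRYPTO_TFM_REQ_MAY_SLEEP,
					      crypto_req_done, &wait);
		/* crypto_wait_req() resolves -EINPROGRESS/-EBUSY into the final status. */
		return crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
	}
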