return min(srclen, dstlen);
 }
 
-static int chcr_cipher_fallback(struct crypto_skcipher *cipher,
+static int chcr_cipher_fallback(struct crypto_sync_skcipher *cipher,
                                u32 flags,
                                struct scatterlist *src,
                                struct scatterlist *dst,
                                unsigned int nbytes,
                                u8 *iv,
                                unsigned short op_type)
 {
        int err;
 
-       SKCIPHER_REQUEST_ON_STACK(subreq, cipher);
+       SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, cipher);
 
-       skcipher_request_set_tfm(subreq, cipher);
+       skcipher_request_set_sync_tfm(subreq, cipher);
        skcipher_request_set_callback(subreq, flags, NULL, NULL);
        skcipher_request_set_crypt(subreq, src, dst,
                                   nbytes, iv);
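
The hunk above is the core of the conversion: the fallback now runs on a crypto_sync_skcipher, whose request size is bounded by a compile-time constant checked at allocation, so the on-stack request macro no longer needs a VLA. A condensed sketch of the pattern outside the driver (the helper name is hypothetical; only the crypto API calls are real):

        /* Run a software fallback synchronously on an on-stack request. */
        static int sw_fallback_encrypt(struct crypto_sync_skcipher *tfm,
                                       struct scatterlist *src,
                                       struct scatterlist *dst,
                                       unsigned int nbytes, u8 *iv)
        {
                int err;

                SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, tfm);

                skcipher_request_set_sync_tfm(subreq, tfm);
                skcipher_request_set_callback(subreq, 0, NULL, NULL);
                skcipher_request_set_crypt(subreq, src, dst, nbytes, iv);
                err = crypto_skcipher_encrypt(subreq);
                skcipher_request_zero(subreq);  /* wipe request state off the stack */
                return err;
        }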
@@ ... @@ static int chcr_cipher_fallback_setkey(struct crypto_ablkcipher *cipher,
        struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
        int err = 0;
 
-       crypto_skcipher_clear_flags(ablkctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
-       crypto_skcipher_set_flags(ablkctx->sw_cipher, cipher->base.crt_flags &
-                                 CRYPTO_TFM_REQ_MASK);
-       err = crypto_skcipher_setkey(ablkctx->sw_cipher, key, keylen);
+       crypto_sync_skcipher_clear_flags(ablkctx->sw_cipher,
+                               CRYPTO_TFM_REQ_MASK);
+       crypto_sync_skcipher_set_flags(ablkctx->sw_cipher,
+                               cipher->base.crt_flags & CRYPTO_TFM_REQ_MASK);
+       err = crypto_sync_skcipher_setkey(ablkctx->sw_cipher, key, keylen);
        tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
        tfm->crt_flags |=
-               crypto_skcipher_get_flags(ablkctx->sw_cipher) &
+               crypto_sync_skcipher_get_flags(ablkctx->sw_cipher) &
                CRYPTO_TFM_RES_MASK;
        return err;
 }
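
The setkey hunk keeps the driver's existing contract around the sync wrapper: CRYPTO_TFM_REQ_* flags flow into the fallback before the key is set, and CRYPTO_TFM_RES_* flags flow back so the caller can tell why a key was rejected. A self-contained sketch of that round-trip (function and parameter names are illustrative):

        /* Propagate request flags into a sync fallback, set the key,
         * and copy result flags back to the caller-visible tfm.
         */
        static int fallback_setkey(struct crypto_tfm *tfm,
                                   struct crypto_sync_skcipher *sw,
                                   const u8 *key, unsigned int keylen)
        {
                int err;

                crypto_sync_skcipher_clear_flags(sw, CRYPTO_TFM_REQ_MASK);
                crypto_sync_skcipher_set_flags(sw,
                                tfm->crt_flags & CRYPTO_TFM_REQ_MASK);
                err = crypto_sync_skcipher_setkey(sw, key, keylen);
                tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
                tfm->crt_flags |= crypto_sync_skcipher_get_flags(sw) &
                                  CRYPTO_TFM_RES_MASK;
                return err;
        }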
@@ ... @@ static int chcr_cra_init(struct crypto_tfm *tfm)
        struct chcr_context *ctx = crypto_tfm_ctx(tfm);
        struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
 
-       ablkctx->sw_cipher = crypto_alloc_skcipher(alg->cra_name, 0,
-                               CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+       ablkctx->sw_cipher = crypto_alloc_sync_skcipher(alg->cra_name, 0,
+                               CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(ablkctx->sw_cipher)) {
                pr_err("failed to allocate fallback for %s\n", alg->cra_name);
                return PTR_ERR(ablkctx->sw_cipher);
@@ ... @@ static int chcr_rfc3686_init(struct crypto_tfm *tfm)
        /*RFC3686 initialises IV counter value to 1, rfc3686(ctr(aes))
         * cannot be used as fallback in chcr_handle_cipher_response
         */
-       ablkctx->sw_cipher = crypto_alloc_skcipher("ctr(aes)", 0,
-                               CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
+       ablkctx->sw_cipher = crypto_alloc_sync_skcipher("ctr(aes)", 0,
+                               CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(ablkctx->sw_cipher)) {
                pr_err("failed to allocate fallback for %s\n", alg->cra_name);
                return PTR_ERR(ablkctx->sw_cipher);
@@ ... @@ static void chcr_cra_exit(struct crypto_tfm *tfm)
        struct chcr_context *ctx = crypto_tfm_ctx(tfm);
        struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
 
-       crypto_free_skcipher(ablkctx->sw_cipher);
+       crypto_free_sync_skcipher(ablkctx->sw_cipher);
        if (ablkctx->aes_generic)
                crypto_free_cipher(ablkctx->aes_generic);
 }
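
Taken together, the init and exit hunks move the fallback's whole lifecycle onto the sync API. CRYPTO_ALG_ASYNC can be dropped from the allocation mask because crypto_alloc_sync_skcipher() adds it internally, restricting the lookup to synchronous implementations. A minimal lifecycle sketch (the function and the "cbc(aes)" choice are illustrative):

        /* Allocate a synchronous fallback, use it, free it. */
        static int demo_sync_fallback(void)
        {
                struct crypto_sync_skcipher *sw;

                sw = crypto_alloc_sync_skcipher("cbc(aes)", 0,
                                                CRYPTO_ALG_NEED_FALLBACK);
                if (IS_ERR(sw))
                        return PTR_ERR(sw);

                /* ... drive sw via SYNC_SKCIPHER_REQUEST_ON_STACK() ... */

                crypto_free_sync_skcipher(sw);
                return 0;
        }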