#define RK_CRYPTO_DEC                  BIT(0)
 
+/*
+ * Decide whether a request must be routed to the software fallback.
+ *
+ * The crypto engine DMAs scatterlist entries directly, so it requires
+ * 32-bit aligned offsets, whole cipher blocks per SG entry, and source
+ * and destination lists that advance in lockstep.  Returns true when any
+ * of those constraints is violated (or the request is empty) and the
+ * fallback tfm must be used instead; false when the hardware can run it.
+ */
+static int rk_cipher_need_fallback(struct skcipher_request *req)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+	unsigned int bs = crypto_skcipher_blocksize(tfm);
+	struct scatterlist *sgs, *sgd;
+	unsigned int stodo, dtodo, len;
+
+	/* Zero-length requests are not handled by the hardware. */
+	if (!req->cryptlen)
+		return true;
+
+	len = req->cryptlen;
+	sgs = req->src;
+	sgd = req->dst;
+	while (sgs && sgd) {
+		/* The engine can only DMA from/to 32-bit aligned offsets. */
+		if (!IS_ALIGNED(sgs->offset, sizeof(u32)))
+			return true;
+		if (!IS_ALIGNED(sgd->offset, sizeof(u32)))
+			return true;
+		/* Each SG entry must carry a whole number of cipher blocks. */
+		stodo = min(len, sgs->length);
+		if (stodo % bs)
+			return true;
+		dtodo = min(len, sgd->length);
+		if (dtodo % bs)
+			return true;
+		/* src and dst must be split identically for in-place DMA. */
+		if (stodo != dtodo)
+			return true;
+		len -= stodo;
+		sgs = sg_next(sgs);
+		sgd = sg_next(sgd);
+	}
+	return false;
+}
+
+/*
+ * Run the request on the software fallback tfm, mirroring the original
+ * request's callback, buffers and direction (RK_CRYPTO_DEC selects
+ * decryption).  Returns the fallback cipher's result.
+ */
+static int rk_cipher_fallback(struct skcipher_request *areq)
+{
+	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
+	struct rk_cipher_ctx *op = crypto_skcipher_ctx(tfm);
+	struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq);
+
+	/* Clone the caller's request onto the fallback tfm. */
+	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
+	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
+				      areq->base.complete, areq->base.data);
+	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
+				   areq->cryptlen, areq->iv);
+
+	if (rctx->mode & RK_CRYPTO_DEC)
+		return crypto_skcipher_decrypt(&rctx->fallback_req);
+
+	return crypto_skcipher_encrypt(&rctx->fallback_req);
+}
+
 static void rk_crypto_complete(struct crypto_async_request *base, int err)
 {
        if (base->complete)
 static int rk_handle_req(struct rk_crypto_info *dev,
                          struct skcipher_request *req)
 {
-	if (!IS_ALIGNED(req->cryptlen, dev->align_size))
-		return -EINVAL;
-	else
-		return dev->enqueue(dev, &req->base);
+	/* Requests the engine cannot DMA go through the software fallback. */
+	if (rk_cipher_need_fallback(req))
+		return rk_cipher_fallback(req);
+
+	return dev->enqueue(dev, &req->base);
 }
 
 static int rk_aes_setkey(struct crypto_skcipher *cipher,
                return -EINVAL;
        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
-       return 0;
+
+       return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
 }
 
 static int rk_des_setkey(struct crypto_skcipher *cipher,
 
        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
-       return 0;
+
+       return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
 }
 
 static int rk_tdes_setkey(struct crypto_skcipher *cipher,
 
        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
-       return 0;
+       return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
 }
 
 static int rk_aes_ecb_encrypt(struct skcipher_request *req)
 {
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
+       const char *name = crypto_tfm_alg_name(&tfm->base);
        struct rk_crypto_tmp *algt;
 
        algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher);
        if (!ctx->dev->addr_vir)
                return -ENOMEM;
 
+       ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
+       if (IS_ERR(ctx->fallback_tfm)) {
+               dev_err(ctx->dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
+                       name, PTR_ERR(ctx->fallback_tfm));
+               return PTR_ERR(ctx->fallback_tfm);
+       }
+
+       tfm->reqsize = sizeof(struct rk_cipher_rctx) +
+               crypto_skcipher_reqsize(ctx->fallback_tfm);
+
        return 0;
 }
 
        struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
 
        free_page((unsigned long)ctx->dev->addr_vir);
+       crypto_free_skcipher(ctx->fallback_tfm);
 }
 
 struct rk_crypto_tmp rk_ecb_aes_alg = {
                .base.cra_name          = "ecb(aes)",
                .base.cra_driver_name   = "ecb-aes-rk",
                .base.cra_priority      = 300,
-               .base.cra_flags         = CRYPTO_ALG_ASYNC,
+               .base.cra_flags         = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
                .base.cra_blocksize     = AES_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct rk_cipher_ctx),
                .base.cra_alignmask     = 0x0f,
                .base.cra_name          = "cbc(aes)",
                .base.cra_driver_name   = "cbc-aes-rk",
                .base.cra_priority      = 300,
-               .base.cra_flags         = CRYPTO_ALG_ASYNC,
+               .base.cra_flags         = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
                .base.cra_blocksize     = AES_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct rk_cipher_ctx),
                .base.cra_alignmask     = 0x0f,
                .base.cra_name          = "ecb(des)",
                .base.cra_driver_name   = "ecb-des-rk",
                .base.cra_priority      = 300,
-               .base.cra_flags         = CRYPTO_ALG_ASYNC,
+               .base.cra_flags         = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
                .base.cra_blocksize     = DES_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct rk_cipher_ctx),
                .base.cra_alignmask     = 0x07,
                .base.cra_name          = "cbc(des)",
                .base.cra_driver_name   = "cbc-des-rk",
                .base.cra_priority      = 300,
-               .base.cra_flags         = CRYPTO_ALG_ASYNC,
+               .base.cra_flags         = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
                .base.cra_blocksize     = DES_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct rk_cipher_ctx),
                .base.cra_alignmask     = 0x07,
                .base.cra_name          = "ecb(des3_ede)",
                .base.cra_driver_name   = "ecb-des3-ede-rk",
                .base.cra_priority      = 300,
-               .base.cra_flags         = CRYPTO_ALG_ASYNC,
+               .base.cra_flags         = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
                .base.cra_blocksize     = DES_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct rk_cipher_ctx),
                .base.cra_alignmask     = 0x07,
                .base.cra_name          = "cbc(des3_ede)",
                .base.cra_driver_name   = "cbc-des3-ede-rk",
                .base.cra_priority      = 300,
-               .base.cra_flags         = CRYPTO_ALG_ASYNC,
+               .base.cra_flags         = CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
                .base.cra_blocksize     = DES_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct rk_cipher_ctx),
                .base.cra_alignmask     = 0x07,