#include <crypto/internal/des.h>
 #include <crypto/gcm.h>
 #include <crypto/ghash.h>
+#include <crypto/poly1305.h>
 #include <crypto/sha.h>
 #include <crypto/xts.h>
 #include <crypto/skcipher.h>
 
        u32 mode;
        enum safexcel_cipher_alg alg;
-       bool aead;
-       int  xcm; /* 0=authenc, 1=GCM, 2 reserved for CCM */
+       char aead; /* !=0=AEAD, 2=IPSec ESP AEAD */
+       char xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
 
        __le32 key[16];
        u32 nonce;
        u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
 
        struct crypto_cipher *hkaes;
+       struct crypto_aead *fback;
 };
 
 struct safexcel_cipher_req {
        } else if (ctx->alg == SAFEXCEL_CHACHA20) {
                cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
 
-               /* 96 bit nonce part */
-               memcpy(&cdesc->control_data.token[0], &iv[4], 12);
-               /* 32 bit counter */
-               cdesc->control_data.token[3] = *(u32 *)iv;
+               if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
+                       /* 32 bit nonce part */
+                       cdesc->control_data.token[0] = ctx->nonce;
+                       /* 64 bit IV part */
+                       memcpy(&cdesc->control_data.token[1], iv, 8);
+                       /* 32 bit counter, starting at 0 */
+                       cdesc->control_data.token[3] = 0;
+               } else if (ctx->aead) {
+                       /* 96 bit nonce part */
+                       memcpy(&cdesc->control_data.token[0], iv, 12);
+                       /* 32 bit counter, starting at 0 */
+                       cdesc->control_data.token[3] = 0;
+               } else {
+                       /* 96 bit nonce part */
+                       memcpy(&cdesc->control_data.token[0], &iv[4], 12);
+                       /* 32 bit counter */
+                       cdesc->control_data.token[3] = *(u32 *)iv;
+               }
 
                return;
        } else if (ctx->xcm == EIP197_XCM_MODE_GCM) {
                token[13].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
        }
 
+       if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
+               /* For ESP mode, skip over the IV */
+               token[7].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
+               token[7].packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
+
+               assoclen -= EIP197_AEAD_IPSEC_IV_SIZE;
+       }
+
        token[6].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
        token[6].packet_length = assoclen;
+       token[6].instructions = EIP197_TOKEN_INS_LAST |
+                               EIP197_TOKEN_INS_TYPE_HASH;
 
-       if (likely(cryptlen)) {
-               token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;
-
+       if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
                token[10].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
                token[10].packet_length = cryptlen;
                token[10].stat = EIP197_TOKEN_STAT_LAST_HASH;
                                         EIP197_TOKEN_INS_TYPE_OUTPUT;
        } else if (ctx->xcm != EIP197_XCM_MODE_CCM) {
                token[6].stat = EIP197_TOKEN_STAT_LAST_HASH;
-               token[6].instructions = EIP197_TOKEN_INS_LAST |
-                                       EIP197_TOKEN_INS_TYPE_HASH;
        }
 
        if (!ctx->xcm)
        token[9].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                                EIP197_TOKEN_INS_TYPE_CRYPTO;
 
-       if (ctx->xcm == EIP197_XCM_MODE_GCM) {
-               token[6].instructions = EIP197_TOKEN_INS_LAST |
-                                       EIP197_TOKEN_INS_TYPE_HASH;
-       } else {
+       if (ctx->xcm != EIP197_XCM_MODE_GCM) {
                u8 *cbcmaciv = (u8 *)&token[1];
                u32 *aadlen = (u32 *)&token[5];
 
                        goto badkey;
                break;
        default:
-               dev_err(priv->dev, "aead: unsupported hash algorithm\n");
+               dev_err(priv->dev, "aead: unsupported hash algorithm\n");
                goto badkey;
        }
 
                                CONTEXT_CONTROL_DIGEST_XCM |
                                ctx->hash_alg |
                                CONTEXT_CONTROL_SIZE(ctrl_size);
+               } else if (ctx->alg == SAFEXCEL_CHACHA20) {
+                       /* Chacha20-Poly1305 */
+                       cdesc->control_data.control0 =
+                               CONTEXT_CONTROL_KEY_EN |
+                               CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
+                               (sreq->direction == SAFEXCEL_ENCRYPT ?
+                                       CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
+                                       CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
+                               ctx->hash_alg |
+                               CONTEXT_CONTROL_SIZE(ctrl_size);
+                       return 0;
                } else {
                        ctrl_size += ctx->state_sz / sizeof(u32) * 2;
                        cdesc->control_data.control0 =
        },
 };
 
-static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
-                                            const u8 *key, unsigned int len)
+static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
+                                    const u8 *key)
 {
-       struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int i;
 
-       if (len != CHACHA_KEY_SIZE) {
-               crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
-               return -EINVAL;
-       }
-
        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
                for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++) {
                        if (ctx->key[i] !=
        for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++)
                ctx->key[i] = get_unaligned_le32(key + i * sizeof(u32));
        ctx->key_len = CHACHA_KEY_SIZE;
+}
+
+/*
+ * skcipher ->setkey() for ChaCha20: reject anything but a 256 bit key,
+ * then defer to the shared safexcel_chacha20_setkey() helper to program
+ * the context.
+ */
+static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
+                                            const u8 *key, unsigned int len)
+{
+       struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
+
+       if (len != CHACHA_KEY_SIZE) {
+               crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+               return -EINVAL;
+       }
+       safexcel_chacha20_setkey(ctx, key);
+
+       return 0;
+}
                },
        },
 };
+
+/*
+ * AEAD ->setkey() for chacha20-poly1305.  For the rfc7539esp variant the
+ * last four key bytes carry the implicit ESP nonce: strip and stash them
+ * in ctx->nonce before validating the remaining 256 bit ChaCha20 key.
+ */
+static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
+                                   const u8 *key, unsigned int len)
+{
+       struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
+
+       if (ctx->aead  == EIP197_AEAD_TYPE_IPSEC_ESP &&
+           len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
+               /* ESP variant has nonce appended to key */
+               len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
+               /* NOTE(review): direct u32 load assumes the key buffer is
+                * 4-byte aligned; get_unaligned() may be safer - confirm.
+                */
+               ctx->nonce = *(u32 *)(key + len);
+       }
+       if (len != CHACHA_KEY_SIZE) {
+               crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+               return -EINVAL;
+       }
+       safexcel_chacha20_setkey(ctx, key);
+
+       return 0;
+}
+
+/* Poly1305 produces a fixed 16 byte tag; accept only that authsize. */
+static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
+                                        unsigned int authsize)
+{
+       if (authsize != POLY1305_DIGEST_SIZE)
+               return -EINVAL;
+       return 0;
+}
+
+/*
+ * Common encrypt/decrypt path for chacha20-poly1305.  Requests big enough
+ * for the engine are queued to the hardware; "small" requests (payload no
+ * larger than the tag, or ESP without a full IV worth of AAD) are rekeyed
+ * onto the software fallback transform and processed there.
+ */
+static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
+                                         enum safexcel_cipher_direction dir)
+{
+       struct safexcel_cipher_req *creq = aead_request_ctx(req);
+       struct crypto_aead *aead = crypto_aead_reqtfm(req);
+       struct crypto_tfm *tfm = crypto_aead_tfm(aead);
+       struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+       /* NOTE(review): subreq overlays the same reqctx storage as creq;
+        * the cra_init sizes the reqctx for the larger of the two and only
+        * one of them is used per request.
+        */
+       struct aead_request *subreq = aead_request_ctx(req);
+       u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
+       int i, ret = 0;
+
+       /*
+        * Instead of wasting time detecting umpteen silly corner cases,
+        * just dump all "small" requests to the fallback implementation.
+        * HW would not be faster on such small requests anyway.
+        */
+       if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
+                   req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
+                  req->cryptlen > POLY1305_DIGEST_SIZE)) {
+               return safexcel_queue_req(&req->base, creq, dir);
+       }
+
+       /* HW cannot do full (AAD+payload) zero length, use fallback */
+       for (i = 0; i < CHACHA_KEY_SIZE / sizeof(u32); i++)
+               key[i] = cpu_to_le32(ctx->key[i]);
+       if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
+               /* ESP variant has nonce appended to the key */
+               key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
+               ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
+                                        CHACHA_KEY_SIZE +
+                                        EIP197_AEAD_IPSEC_NONCE_SIZE);
+       } else {
+               ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
+                                        CHACHA_KEY_SIZE);
+       }
+       if (ret) {
+               /* Propagate the fallback's key-error flags to our tfm */
+               crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
+               crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
+                                           CRYPTO_TFM_REQ_MASK);
+               return ret;
+       }
+
+       /* Mirror the original request onto the fallback subrequest */
+       aead_request_set_tfm(subreq, ctx->fback);
+       aead_request_set_callback(subreq, req->base.flags, req->base.complete,
+                                 req->base.data);
+       aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
+                              req->iv);
+       aead_request_set_ad(subreq, req->assoclen);
+
+       return (dir ==  SAFEXCEL_ENCRYPT) ?
+               crypto_aead_encrypt(subreq) :
+               crypto_aead_decrypt(subreq);
+}
+
+/* AEAD ->encrypt() entry point: delegate to the common crypt path. */
+static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
+{
+       return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
+}
+
+/* AEAD ->decrypt() entry point: delegate to the common crypt path. */
+static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
+{
+       return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
+}
+
+/*
+ * Init for the rfc7539 AEAD tfm: configure ChaCha20 in 256 bit key /
+ * 32 bit counter mode with the Poly1305 OTK derived by the hardware,
+ * and allocate a software fallback for requests the engine cannot take.
+ */
+static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
+{
+       struct crypto_aead *aead = __crypto_aead_cast(tfm);
+       struct aead_alg *alg = crypto_aead_alg(aead);
+       struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+       safexcel_aead_cra_init(tfm);
+       ctx->alg  = SAFEXCEL_CHACHA20;
+       ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
+                   CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
+       ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
+       ctx->state_sz = 0; /* Precomputed by HW */
+
+       /* Allocate fallback implementation */
+       ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
+                                      CRYPTO_ALG_ASYNC |
+                                      CRYPTO_ALG_NEED_FALLBACK);
+       if (IS_ERR(ctx->fback))
+               return PTR_ERR(ctx->fback);
+
+       /* reqctx must fit either our own req ctx or a fallback subrequest */
+       crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
+                                         sizeof(struct aead_request) +
+                                         crypto_aead_reqsize(ctx->fback)));
+
+       return 0;
+}
+
+/* Release the fallback tfm, then run the common AEAD teardown. */
+static void safexcel_aead_chachapoly_cra_exit(struct crypto_tfm *tfm)
+{
+       struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+       crypto_free_aead(ctx->fback);
+       safexcel_aead_cra_exit(tfm);
+}
+
+/* rfc7539(chacha20,poly1305): generic ChaCha20-Poly1305 AEAD template. */
+struct safexcel_alg_template safexcel_alg_chachapoly = {
+       .type = SAFEXCEL_ALG_TYPE_AEAD,
+       .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
+       .alg.aead = {
+               .setkey = safexcel_aead_chachapoly_setkey,
+               .setauthsize = safexcel_aead_chachapoly_setauthsize,
+               .encrypt = safexcel_aead_chachapoly_encrypt,
+               .decrypt = safexcel_aead_chachapoly_decrypt,
+               .ivsize = CHACHAPOLY_IV_SIZE,
+               .maxauthsize = POLY1305_DIGEST_SIZE,
+               .base = {
+                       .cra_name = "rfc7539(chacha20,poly1305)",
+                       .cra_driver_name = "safexcel-chacha20-poly1305",
+                       /* +1 to put it above HW chacha + SW poly */
+                       .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
+                       .cra_flags = CRYPTO_ALG_ASYNC |
+                                    CRYPTO_ALG_KERN_DRIVER_ONLY |
+                                    CRYPTO_ALG_NEED_FALLBACK,
+                       .cra_blocksize = 1,
+                       .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+                       .cra_alignmask = 0,
+                       .cra_init = safexcel_aead_chachapoly_cra_init,
+                       .cra_exit = safexcel_aead_chachapoly_cra_exit,
+                       .cra_module = THIS_MODULE,
+               },
+       },
+};
+
+/* Same init as rfc7539, but mark the context as the IPsec ESP flavour. */
+static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
+{
+       struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+       int ret;
+
+       ret = safexcel_aead_chachapoly_cra_init(tfm);
+       /* NOTE(review): ctx->aead is set even when init failed; presumably
+        * harmless since the tfm is torn down on error - confirm.
+        */
+       ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
+       return ret;
+}
+
+/*
+ * rfc7539esp(chacha20,poly1305): ESP variant; the 32 bit implicit nonce
+ * comes appended to the key, so the advertised ivsize shrinks by it.
+ */
+struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
+       .type = SAFEXCEL_ALG_TYPE_AEAD,
+       .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
+       .alg.aead = {
+               .setkey = safexcel_aead_chachapoly_setkey,
+               .setauthsize = safexcel_aead_chachapoly_setauthsize,
+               .encrypt = safexcel_aead_chachapoly_encrypt,
+               .decrypt = safexcel_aead_chachapoly_decrypt,
+               .ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
+               .maxauthsize = POLY1305_DIGEST_SIZE,
+               .base = {
+                       .cra_name = "rfc7539esp(chacha20,poly1305)",
+                       .cra_driver_name = "safexcel-chacha20-poly1305-esp",
+                       /* +1 to put it above HW chacha + SW poly */
+                       .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
+                       .cra_flags = CRYPTO_ALG_ASYNC |
+                                    CRYPTO_ALG_KERN_DRIVER_ONLY |
+                                    CRYPTO_ALG_NEED_FALLBACK,
+                       .cra_blocksize = 1,
+                       .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+                       .cra_alignmask = 0,
+                       .cra_init = safexcel_aead_chachapolyesp_cra_init,
+                       .cra_exit = safexcel_aead_chachapoly_cra_exit,
+                       .cra_module = THIS_MODULE,
+               },
+       },
+};