rctx->datbuf.size = rctx->len;
        rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->datbuf.size,
                                              &rctx->datbuf.addr, GFP_KERNEL);
-       if (!rctx->datbuf.buf)
-               return -ENOMEM;
+       if (!rctx->datbuf.buf) {
+               ret = -ENOMEM;
+               goto out_finalize;
+       }
 
        scatterwalk_map_and_copy(rctx->datbuf.buf, req->src, 0, req->cryptlen, 0);
 
        dma_free_coherent(ctx->se->dev, rctx->datbuf.size,
                          rctx->datbuf.buf, rctx->datbuf.addr);
 
+out_finalize:
        crypto_finalize_skcipher_request(se->engine, req, ret);
 
        return 0;
 
        ret = tegra_ccm_crypt_init(req, se, rctx);
        if (ret)
-               return ret;
+               goto out_finalize;
 
        /* Allocate buffers required */
        rctx->inbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen + 100;
        rctx->inbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->inbuf.size,
                                             &rctx->inbuf.addr, GFP_KERNEL);
-       if (!rctx->inbuf.buf)
-               return -ENOMEM;
+       if (!rctx->inbuf.buf) {
+               /* Must set ret before jumping: out_finalize passes ret to
+                * crypto_finalize_aead_request(); without this the request
+                * would be finalized with a stale status on OOM.
+                */
+               ret = -ENOMEM;
+               goto out_finalize;
+       }
 
        rctx->outbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen + 100;
        rctx->outbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->outbuf.size,
                                              &rctx->outbuf.addr, GFP_KERNEL);
        if (!rctx->outbuf.buf) {
                ret = -ENOMEM;
-               goto outbuf_err;
+               goto out_free_inbuf;
        }
 
        if (rctx->encrypt) {
-       dma_free_coherent(ctx->se->dev, rctx->inbuf.size,
+       /* Fix swapped sizes: dma_free_coherent() must be given the size the
+        * buffer was allocated with (outbuf.buf was allocated with
+        * outbuf.size, inbuf.buf with inbuf.size). Harmless today only
+        * because both sizes are computed identically.
+        */
+       dma_free_coherent(ctx->se->dev, rctx->outbuf.size,
                          rctx->outbuf.buf, rctx->outbuf.addr);
 
-outbuf_err:
-       dma_free_coherent(ctx->se->dev, rctx->outbuf.size,
+out_free_inbuf:
+       dma_free_coherent(ctx->se->dev, rctx->inbuf.size,
                          rctx->inbuf.buf, rctx->inbuf.addr);
 
+out_finalize:
        crypto_finalize_aead_request(ctx->se->engine, req, ret);
 
        return 0;
        rctx->inbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen;
        rctx->inbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->inbuf.size,
                                             &rctx->inbuf.addr, GFP_KERNEL);
-       if (!rctx->inbuf.buf)
-               return -ENOMEM;
+       if (!rctx->inbuf.buf) {
+               ret = -ENOMEM;
+               goto out_finalize;
+       }
 
        rctx->outbuf.size = rctx->assoclen + rctx->authsize + rctx->cryptlen;
        rctx->outbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->outbuf.size,
                                              &rctx->outbuf.addr, GFP_KERNEL);
        if (!rctx->outbuf.buf) {
                ret = -ENOMEM;
-               goto outbuf_err;
+               goto out_free_inbuf;
        }
 
        /* If there is associated data perform GMAC operation */
        dma_free_coherent(ctx->se->dev, rctx->outbuf.size,
                          rctx->outbuf.buf, rctx->outbuf.addr);
 
-outbuf_err:
+out_free_inbuf:
        dma_free_coherent(ctx->se->dev, rctx->inbuf.size,
                          rctx->inbuf.buf, rctx->inbuf.addr);
 
-       /* Finalize the request if there are no errors */
+out_finalize:
        crypto_finalize_aead_request(ctx->se->engine, req, ret);
 
        return 0;