Linux-Crypto Archive on lore.kernel.org
 help / color / Atom feed
From: Ard Biesheuvel <ardb@kernel.org>
To: Herbert Xu <herbert@gondor.apana.org.au>
Cc: Stephan Mueller <smueller@chronox.de>,
	Linux Crypto Mailing List <linux-crypto@vger.kernel.org>,
	Eric Biggers <ebiggers@kernel.org>
Subject: Re: [v3 PATCH 12/31] crypto: arm64/chacha - Add support for chaining
Date: Wed, 29 Jul 2020 09:16:55 +0300
Message-ID: <CAMj1kXFj9-+LCbrLT3VSY_nq3MsyRigFhBEkf9BCosH-UJ+YsQ@mail.gmail.com> (raw)
In-Reply-To: <E1k0JtB-0006Np-A3@fornost.hmeau.com>

On Tue, 28 Jul 2020 at 10:19, Herbert Xu <herbert@gondor.apana.org.au> wrote:
>
> As it stands chacha cannot do chaining.  That is, it has to handle
> each request as a whole.  This patch adds support for chaining when
> the CRYPTO_TFM_REQ_MORE flag is set.
>
> Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Only state[12] needs to be preserved, since it contains the block
counter. Everything else in the state can be derived from the IV.

So by doing the init unconditionally, and overriding state[12] to the
captured value (if it exists), we can get rid of the redundant copy of
state, which also avoids inconsistencies if IV and state are out of
sync.

> ---
>
>  arch/arm64/crypto/chacha-neon-glue.c |   43 ++++++++++++++++++++++-------------
>  1 file changed, 28 insertions(+), 15 deletions(-)
>
> diff --git a/arch/arm64/crypto/chacha-neon-glue.c b/arch/arm64/crypto/chacha-neon-glue.c
> index af2bbca38e70f..d82c574ddcc00 100644
> --- a/arch/arm64/crypto/chacha-neon-glue.c
> +++ b/arch/arm64/crypto/chacha-neon-glue.c
> @@ -19,10 +19,8 @@
>   * (at your option) any later version.
>   */
>
> -#include <crypto/algapi.h>
>  #include <crypto/internal/chacha.h>
>  #include <crypto/internal/simd.h>
> -#include <crypto/internal/skcipher.h>
>  #include <linux/jump_label.h>
>  #include <linux/kernel.h>
>  #include <linux/module.h>
> @@ -101,16 +99,16 @@ void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
>  }
>  EXPORT_SYMBOL(chacha_crypt_arch);
>
> -static int chacha_neon_stream_xor(struct skcipher_request *req,
> -                                 const struct chacha_ctx *ctx, const u8 *iv)
> +static int chacha_neon_stream_xor(struct skcipher_request *req, int nrounds)
>  {
> +       struct chacha_reqctx *rctx = skcipher_request_ctx(req);
>         struct skcipher_walk walk;
> -       u32 state[16];
> +       u32 *state = rctx->state;
>         int err;
>
> -       err = skcipher_walk_virt(&walk, req, false);
> +       rctx->init = req->base.flags & CRYPTO_TFM_REQ_MORE;
>
> -       chacha_init_generic(state, ctx->key, iv);
> +       err = skcipher_walk_virt(&walk, req, false);
>
>         while (walk.nbytes > 0) {
>                 unsigned int nbytes = walk.nbytes;
> @@ -122,11 +120,11 @@ static int chacha_neon_stream_xor(struct skcipher_request *req,
>                     !crypto_simd_usable()) {
>                         chacha_crypt_generic(state, walk.dst.virt.addr,
>                                              walk.src.virt.addr, nbytes,
> -                                            ctx->nrounds);
> +                                            nrounds);
>                 } else {
>                         kernel_neon_begin();
>                         chacha_doneon(state, walk.dst.virt.addr,
> -                                     walk.src.virt.addr, nbytes, ctx->nrounds);
> +                                     walk.src.virt.addr, nbytes, nrounds);
>                         kernel_neon_end();
>                 }
>                 err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
> @@ -138,26 +136,38 @@ static int chacha_neon_stream_xor(struct skcipher_request *req,
>  static int chacha_neon(struct skcipher_request *req)
>  {
>         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
> +       struct chacha_reqctx *rctx = skcipher_request_ctx(req);
>         struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
>
> -       return chacha_neon_stream_xor(req, ctx, req->iv);
> +       if (!rctx->init)
> +               chacha_init_generic(rctx->state, ctx->key, req->iv);
> +
> +       return chacha_neon_stream_xor(req, ctx->nrounds);
>  }
>
>  static int xchacha_neon(struct skcipher_request *req)
>  {
>         struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
> +       struct chacha_reqctx *rctx = skcipher_request_ctx(req);
>         struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
> -       struct chacha_ctx subctx;
> -       u32 state[16];
> +       int nrounds = ctx->nrounds;
> +       u32 *state = rctx->state;
>         u8 real_iv[16];
> +       u32 key[8];
> +
> +       if (rctx->init)
> +               goto skip_init;
>
>         chacha_init_generic(state, ctx->key, req->iv);
> -       hchacha_block_arch(state, subctx.key, ctx->nrounds);
> -       subctx.nrounds = ctx->nrounds;
> +       hchacha_block_arch(state, key, nrounds);
>
>         memcpy(&real_iv[0], req->iv + 24, 8);
>         memcpy(&real_iv[8], req->iv + 16, 8);
> -       return chacha_neon_stream_xor(req, &subctx, real_iv);
> +
> +       chacha_init_generic(state, key, real_iv);
> +
> +skip_init:
> +       return chacha_neon_stream_xor(req, nrounds);
>  }
>
>  static struct skcipher_alg algs[] = {
> @@ -174,6 +184,7 @@ static struct skcipher_alg algs[] = {
>                 .ivsize                 = CHACHA_IV_SIZE,
>                 .chunksize              = CHACHA_BLOCK_SIZE,
>                 .walksize               = 5 * CHACHA_BLOCK_SIZE,
> +               .reqsize                = sizeof(struct chacha_reqctx),
>                 .setkey                 = chacha20_setkey,
>                 .encrypt                = chacha_neon,
>                 .decrypt                = chacha_neon,
> @@ -190,6 +201,7 @@ static struct skcipher_alg algs[] = {
>                 .ivsize                 = XCHACHA_IV_SIZE,
>                 .chunksize              = CHACHA_BLOCK_SIZE,
>                 .walksize               = 5 * CHACHA_BLOCK_SIZE,
> +               .reqsize                = sizeof(struct chacha_reqctx),
>                 .setkey                 = chacha20_setkey,
>                 .encrypt                = xchacha_neon,
>                 .decrypt                = xchacha_neon,
> @@ -206,6 +218,7 @@ static struct skcipher_alg algs[] = {
>                 .ivsize                 = XCHACHA_IV_SIZE,
>                 .chunksize              = CHACHA_BLOCK_SIZE,
>                 .walksize               = 5 * CHACHA_BLOCK_SIZE,
> +               .reqsize                = sizeof(struct chacha_reqctx),
>                 .setkey                 = chacha12_setkey,
>                 .encrypt                = xchacha_neon,
>                 .decrypt                = xchacha_neon,

  reply index

Thread overview: 58+ messages / expand[flat|nested]  mbox.gz  Atom feed  top
2020-07-28  7:17 [v3 PATCH 0/31] crypto: skcipher - Add support for no chaining and partial chaining Herbert Xu
2020-07-28  7:18 ` [v3 PATCH 1/31] crypto: skcipher - Add final chunk size field for chaining Herbert Xu
2020-07-28 17:15   ` Eric Biggers
2020-07-28 17:22     ` Herbert Xu
2020-07-28 17:26       ` Ard Biesheuvel
2020-07-28 17:30         ` Herbert Xu
2020-07-28 17:46           ` Ard Biesheuvel
2020-07-28 22:12             ` Herbert Xu
2020-07-28  7:18 ` [v3 PATCH 2/31] crypto: algif_skcipher - Add support for final_chunksize Herbert Xu
2020-07-28  7:18 ` [v3 PATCH 3/31] crypto: cts - Add support for chaining Herbert Xu
2020-07-28 11:05   ` Ard Biesheuvel
2020-07-28 11:53     ` Herbert Xu
2020-07-28 11:59       ` Ard Biesheuvel
2020-07-28 12:03         ` Herbert Xu
2020-07-28 12:08           ` Ard Biesheuvel
2020-07-28 12:19             ` Herbert Xu
2020-07-28  7:18 ` [v3 PATCH 4/31] crypto: arm64/aes-glue - Add support for chaining CTS Herbert Xu
2020-07-28  7:18 ` [v3 PATCH 5/31] crypto: nitrox " Herbert Xu
2020-07-28  7:18 ` [v3 PATCH 6/31] crypto: ccree " Herbert Xu
2020-07-28  7:18 ` [v3 PATCH 7/31] crypto: skcipher - Add alg reqsize field Herbert Xu
2020-07-28  7:18 ` [v3 PATCH 8/31] crypto: skcipher - Initialise requests to zero Herbert Xu
2020-07-28 17:10   ` Eric Biggers
2020-07-29  3:38     ` Herbert Xu
2020-07-28  7:18 ` [v3 PATCH 9/31] crypto: cryptd - Add support for chaining Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 10/31] crypto: chacha-generic " Herbert Xu
2020-08-10 15:20   ` Horia Geantă
2020-08-11  0:57     ` Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 11/31] crypto: arm/chacha " Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 12/31] crypto: arm64/chacha " Herbert Xu
2020-07-29  6:16   ` Ard Biesheuvel [this message]
2020-07-29  6:28     ` Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 13/31] crypto: mips/chacha " Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 14/31] crypto: x86/chacha " Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 15/31] crypto: inside-secure - Set final_chunksize on chacha Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 16/31] crypto: caam/qi2 " Herbert Xu
2020-08-10 15:24   ` Horia Geantă
2020-07-28  7:19 ` [v3 PATCH 17/31] crypto: ctr - Allow rfc3686 to be chained Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 18/31] crypto: crypto4xx - Remove rfc3686 implementation Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 19/31] crypto: caam - Remove rfc3686 implementations Herbert Xu
2020-08-10 16:47   ` Horia Geantă
2020-08-11  0:59     ` Herbert Xu
2020-08-11  7:32       ` Horia Geantă
2020-08-11  7:34         ` Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 20/31] crypto: nitrox - Set final_chunksize on rfc3686 Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 21/31] crypto: ccp - Remove rfc3686 implementation Herbert Xu
2020-08-06 19:16   ` John Allen
2020-07-28  7:19 ` [v3 PATCH 22/31] crypto: chelsio " Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 23/31] crypto: inside-secure - Set final_chunksize on rfc3686 Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 24/31] crypto: ixp4xx - Remove rfc3686 implementation Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 25/31] crypto: nx - Set final_chunksize on rfc3686 Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 26/31] crypto: essiv - Set final_chunksize Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 27/31] crypto: simd - Add support for chaining Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 28/31] crypto: arm64/essiv - Set final_chunksize Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 29/31] crypto: ccree - Set final_chunksize on essiv Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 30/31] crypto: kw - Set final_chunksize Herbert Xu
2020-07-28  7:19 ` [v3 PATCH 31/31] crypto: salsa20-generic - Add support for chaining Herbert Xu
2020-07-28 17:19 ` [v3 PATCH 0/31] crypto: skcipher - Add support for no chaining and partial chaining Eric Biggers
2020-07-29  3:40   ` Herbert Xu

Reply instructions:

You may reply publicly to this message via plain-text email
using any one of the following methods:

* Save the following mbox file, import it into your mail client,
  and reply-to-all from there: mbox

  Avoid top-posting and favor interleaved quoting:
  https://en.wikipedia.org/wiki/Posting_style#Interleaved_style

* Reply using the --to, --cc, and --in-reply-to
  switches of git-send-email(1):

  git send-email \
    --in-reply-to=CAMj1kXFj9-+LCbrLT3VSY_nq3MsyRigFhBEkf9BCosH-UJ+YsQ@mail.gmail.com \
    --to=ardb@kernel.org \
    --cc=ebiggers@kernel.org \
    --cc=herbert@gondor.apana.org.au \
    --cc=linux-crypto@vger.kernel.org \
    --cc=smueller@chronox.de \
    /path/to/YOUR_REPLY

  https://kernel.org/pub/software/scm/git/docs/git-send-email.html

* If your mail client supports setting the In-Reply-To header
  via mailto: links, try the mailto: link

Linux-Crypto Archive on lore.kernel.org

Archives are clonable:
	git clone --mirror https://lore.kernel.org/linux-crypto/0 linux-crypto/git/0.git

	# If you have public-inbox 1.1+ installed, you may
	# initialize and index your mirror using the following commands:
	public-inbox-init -V2 linux-crypto linux-crypto/ https://lore.kernel.org/linux-crypto \
		linux-crypto@vger.kernel.org
	public-inbox-index linux-crypto

Example config snippet for mirrors

Newsgroup available over NNTP:
	nntp://nntp.lore.kernel.org/org.kernel.vger.linux-crypto


AGPL code for this site: git clone https://public-inbox.org/public-inbox.git