All of lore.kernel.org
 help / color / mirror / Atom feed
From: Herbert Xu <herbert@gondor.apana.org.au>
To: Linux Crypto Mailing List <linux-crypto@vger.kernel.org>
Subject: [PATCH 14/15] crypto: lskcipher - Export incremental interface internally
Date: Sat, 30 Dec 2023 15:16:51 +0800	[thread overview]
Message-ID: <c6382ec09c1e724e54b9842aaf82e609071b0503.1707815065.git.herbert@gondor.apana.org.au> (raw)
In-Reply-To: <cover.1707815065.git.herbert@gondor.apana.org.au>

Export the incremental interface internally so that composite
algorithms such as adiantum can use it.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
---
 crypto/lskcipher.c                 | 45 +++++++++++++++++++++---------
 include/crypto/internal/skcipher.h | 42 ++++++++++++++++++++++++++++
 2 files changed, 74 insertions(+), 13 deletions(-)

diff --git a/crypto/lskcipher.c b/crypto/lskcipher.c
index 00ea963a2d2d..e8b97e4fd579 100644
--- a/crypto/lskcipher.c
+++ b/crypto/lskcipher.c
@@ -87,8 +87,9 @@ EXPORT_SYMBOL_GPL(crypto_lskcipher_setkey);
 
 static int crypto_lskcipher_crypt_unaligned(
 	struct crypto_lskcipher *tfm, const u8 *src, u8 *dst, unsigned len,
-	u8 *iv, int (*crypt)(struct crypto_lskcipher *tfm, const u8 *src,
-			     u8 *dst, unsigned len, u8 *iv, u32 flags))
+	u8 *iv, u32 flags,
+	int (*crypt)(struct crypto_lskcipher *tfm, const u8 *src,
+		     u8 *dst, unsigned len, u8 *iv, u32 flags))
 {
 	unsigned statesize = crypto_lskcipher_statesize(tfm);
 	unsigned ivsize = crypto_lskcipher_ivsize(tfm);
@@ -120,7 +121,7 @@ static int crypto_lskcipher_crypt_unaligned(
 			chunk &= ~(cs - 1);
 
 		memcpy(p, src, chunk);
-		err = crypt(tfm, p, p, chunk, tiv, CRYPTO_LSKCIPHER_FLAG_FINAL);
+		err = crypt(tfm, p, p, chunk, tiv, flags);
 		if (err)
 			goto out;
 
@@ -140,7 +141,7 @@ static int crypto_lskcipher_crypt_unaligned(
 }
 
 static int crypto_lskcipher_crypt(struct crypto_lskcipher *tfm, const u8 *src,
-				  u8 *dst, unsigned len, u8 *iv,
+				  u8 *dst, unsigned len, u8 *iv, u32 flags,
 				  int (*crypt)(struct crypto_lskcipher *tfm,
 					       const u8 *src, u8 *dst,
 					       unsigned len, u8 *iv,
@@ -153,18 +154,18 @@ static int crypto_lskcipher_crypt(struct crypto_lskcipher *tfm, const u8 *src,
 	if (((unsigned long)src | (unsigned long)dst | (unsigned long)iv) &
 	    alignmask) {
 		ret = crypto_lskcipher_crypt_unaligned(tfm, src, dst, len, iv,
-						       crypt);
+						       flags, crypt);
 		goto out;
 	}
 
-	ret = crypt(tfm, src, dst, len, iv, CRYPTO_LSKCIPHER_FLAG_FINAL);
+	ret = crypt(tfm, src, dst, len, iv, flags);
 
 out:
 	return crypto_lskcipher_errstat(alg, ret);
 }
 
-int crypto_lskcipher_encrypt(struct crypto_lskcipher *tfm, const u8 *src,
-			     u8 *dst, unsigned len, u8 *iv)
+int crypto_lskcipher_encrypt_ext(struct crypto_lskcipher *tfm, const u8 *src,
+				 u8 *dst, unsigned len, u8 *iv, u32 flags)
 {
 	struct lskcipher_alg *alg = crypto_lskcipher_alg(tfm);
 
@@ -175,12 +176,13 @@ int crypto_lskcipher_encrypt(struct crypto_lskcipher *tfm, const u8 *src,
 		atomic64_add(len, &istat->encrypt_tlen);
 	}
 
-	return crypto_lskcipher_crypt(tfm, src, dst, len, iv, alg->encrypt);
+	return crypto_lskcipher_crypt(tfm, src, dst, len, iv, flags,
+				      alg->encrypt);
 }
-EXPORT_SYMBOL_GPL(crypto_lskcipher_encrypt);
+EXPORT_SYMBOL_GPL(crypto_lskcipher_encrypt_ext);
 
-int crypto_lskcipher_decrypt(struct crypto_lskcipher *tfm, const u8 *src,
-			     u8 *dst, unsigned len, u8 *iv)
+int crypto_lskcipher_decrypt_ext(struct crypto_lskcipher *tfm, const u8 *src,
+				 u8 *dst, unsigned len, u8 *iv, u32 flags)
 {
 	struct lskcipher_alg *alg = crypto_lskcipher_alg(tfm);
 
@@ -191,7 +193,24 @@ int crypto_lskcipher_decrypt(struct crypto_lskcipher *tfm, const u8 *src,
 		atomic64_add(len, &istat->decrypt_tlen);
 	}
 
-	return crypto_lskcipher_crypt(tfm, src, dst, len, iv, alg->decrypt);
+	return crypto_lskcipher_crypt(tfm, src, dst, len, iv, flags,
+				      alg->decrypt);
+}
+EXPORT_SYMBOL_GPL(crypto_lskcipher_decrypt_ext);
+
+int crypto_lskcipher_encrypt(struct crypto_lskcipher *tfm, const u8 *src,
+			     u8 *dst, unsigned len, u8 *iv)
+{
+	return crypto_lskcipher_encrypt_ext(tfm, src, dst, len, iv,
+					    CRYPTO_LSKCIPHER_FLAG_FINAL);
+}
+EXPORT_SYMBOL_GPL(crypto_lskcipher_encrypt);
+
+int crypto_lskcipher_decrypt(struct crypto_lskcipher *tfm, const u8 *src,
+			     u8 *dst, unsigned len, u8 *iv)
+{
+	return crypto_lskcipher_decrypt_ext(tfm, src, dst, len, iv,
+					    CRYPTO_LSKCIPHER_FLAG_FINAL);
 }
 EXPORT_SYMBOL_GPL(crypto_lskcipher_decrypt);
 
diff --git a/include/crypto/internal/skcipher.h b/include/crypto/internal/skcipher.h
index 1e35e7719b22..0d43153f3cd2 100644
--- a/include/crypto/internal/skcipher.h
+++ b/include/crypto/internal/skcipher.h
@@ -90,6 +90,48 @@ struct skcipher_walk {
 	unsigned int alignmask;
 };
 
+/**
+ * crypto_lskcipher_encrypt_ext() - encrypt plaintext with continuation
+ * @tfm: lskcipher handle
+ * @src: source buffer
+ * @dst: destination buffer
+ * @len: number of bytes to process
+ * @siv: IV + state for the cipher operation.  The length of the IV must
+ *	 comply with the IV size defined by crypto_lskcipher_ivsize.  The
+ *	 IV is then followed with a buffer with the length as specified by
+ *	 crypto_lskcipher_statesize.
+ * @flags: Indicates whether this is a continuation and/or final operation.
+ *
+ * Encrypt plaintext data using the lskcipher handle with continuation.
+ *
+ * Return: >=0 if the cipher operation was successful, if positive
+ *	   then this many bytes have been left unprocessed;
+ *	   < 0 if an error occurred
+ */
+int crypto_lskcipher_encrypt_ext(struct crypto_lskcipher *tfm, const u8 *src,
+				 u8 *dst, unsigned len, u8 *siv, u32 flags);
+
+/**
+ * crypto_lskcipher_decrypt_ext() - decrypt ciphertext with continuation
+ * @tfm: lskcipher handle
+ * @src: source buffer
+ * @dst: destination buffer
+ * @len: number of bytes to process
+ * @siv: IV + state for the cipher operation.  The length of the IV must
+ *	 comply with the IV size defined by crypto_lskcipher_ivsize.  The
+ *	 IV is then followed by a buffer whose length is specified by
+ *	 crypto_lskcipher_statesize.
+ * @flags: Indicates whether this is a continuation and/or final operation.
+ *
+ * Decrypt ciphertext data using the lskcipher handle with continuation.
+ *
+ * Return: >=0 if the cipher operation was successful, if positive
+ *	   then this many bytes have been left unprocessed;
+ *	   < 0 if an error occurred
+ */
+int crypto_lskcipher_decrypt_ext(struct crypto_lskcipher *tfm, const u8 *src,
+				 u8 *dst, unsigned len, u8 *siv, u32 flags);
+
 static inline struct crypto_instance *skcipher_crypto_instance(
 	struct skcipher_instance *inst)
 {
-- 
Email: Herbert Xu <herbert@gondor.apana.org.au>
Home Page: http://gondor.apana.org.au/~herbert/
PGP Key: http://gondor.apana.org.au/~herbert/pubkey.txt


  parent reply	other threads:[~2024-02-13  9:16 UTC|newest]

Thread overview: 30+ messages / expand[flat|nested]  mbox.gz  Atom feed  top
2024-02-13  9:04 [PATCH 00/15] crypto: Add twopass lskcipher for adiantum Herbert Xu
2023-12-02  4:55 ` [PATCH 01/15] crypto: skcipher - Add tailsize attribute Herbert Xu
2024-02-14 23:44   ` Eric Biggers
2024-02-15  6:40     ` Herbert Xu
2024-02-23  6:01       ` Eric Biggers
2023-12-02  5:42 ` [PATCH 02/15] crypto: algif_skcipher - Add support for tailsize Herbert Xu
2023-12-04 10:24 ` [PATCH 04/15] crypto: xts - Convert from skcipher to lskcipher Herbert Xu
2023-12-05  6:09 ` [PATCH 05/15] crypto: skcipher - Add twopass attribute Herbert Xu
2023-12-05  6:13 ` [PATCH 06/15] crypto: algif_skcipher - Disallow nonincremental algorithms Herbert Xu
2024-02-14 22:56   ` Eric Biggers
2024-02-15  6:47     ` Herbert Xu
2024-02-23  6:00       ` Eric Biggers
2023-12-05  9:52 ` [PATCH 07/15] crypto: adiantum - Use lskcipher instead of cipher Herbert Xu
2023-12-06  4:46 ` [PATCH 08/15] crypto: skcipher - Add incremental support to lskcipher wrapper Herbert Xu
2023-12-06  5:49 ` [PATCH 09/15] crypto: chacha-generic - Convert from skcipher to lskcipher Herbert Xu
2024-02-14 23:41   ` Eric Biggers
2024-02-15  6:52     ` Herbert Xu
2023-12-06  6:05 ` [PATCH 10/15] crypto: skcipher - Move nesting check into ecb Herbert Xu
2023-12-06  8:55 ` [PATCH 11/15] crypto: skcipher - Propagate zero-length requests to lskcipher Herbert Xu
2023-12-07 10:03 ` [PATCH 03/15] crypto: skcipher - Remove ivsize check for lskcipher simple templates Herbert Xu
2023-12-07 10:13 ` [PATCH 12/15] crypto: cts - Convert from skcipher to lskcipher Herbert Xu
2023-12-29 10:47 ` [PATCH 13/15] crypto: cts,xts - Update parameters blocksize/chunksize/tailsize Herbert Xu
2024-02-14 23:00   ` Eric Biggers
2024-02-15  7:57     ` Herbert Xu
2024-02-23  6:09       ` Eric Biggers
2023-12-30  7:16 ` Herbert Xu [this message]
2024-02-13  8:48 ` [PATCH 15/15] crypto: adiantum - Convert from skcipher to lskcipher Herbert Xu
2024-02-14 23:35 ` [PATCH 00/15] crypto: Add twopass lskcipher for adiantum Eric Biggers
2024-02-15  8:20   ` Herbert Xu
2024-02-23  6:39     ` Eric Biggers

Reply instructions:

You may reply publicly to this message via plain-text email
using any one of the following methods:

* Save the following mbox file, import it into your mail client,
  and reply-to-all from there: mbox

  Avoid top-posting and favor interleaved quoting:
  https://en.wikipedia.org/wiki/Posting_style#Interleaved_style

* Reply using the --to, --cc, and --in-reply-to
  switches of git-send-email(1):

  git send-email \
    --in-reply-to=c6382ec09c1e724e54b9842aaf82e609071b0503.1707815065.git.herbert@gondor.apana.org.au \
    --to=herbert@gondor.apana.org.au \
    --cc=linux-crypto@vger.kernel.org \
    /path/to/YOUR_REPLY

  https://kernel.org/pub/software/scm/git/docs/git-send-email.html

* If your mail client supports setting the In-Reply-To header
  via mailto: links, try the mailto: link
Be sure your reply has a Subject: header at the top and a blank line before the message body.
This is an external index of several public inboxes,
see mirroring instructions on how to clone and mirror
all data and code used by this external index.