linux-crypto.vger.kernel.org archive mirror
* [PATCH 0/2] Add support for the AES-XTS algorithm
@ 2019-07-26 15:00 Pascal van Leeuwen
  2019-07-26 15:00 ` [PATCH 1/2] crypto: inside-secure - Move static cipher alg & mode settings to init Pascal van Leeuwen
                   ` (2 more replies)
  0 siblings, 3 replies; 5+ messages in thread
From: Pascal van Leeuwen @ 2019-07-26 15:00 UTC (permalink / raw)
  To: linux-crypto; +Cc: antoine.tenart, herbert, davem, Pascal van Leeuwen

This patch set adds support for the AES-XTS skcipher algorithm.

Pascal van Leeuwen (2):
  crypto: inside-secure - Move static cipher alg & mode settings to init
  crypto: inside-secure - Add support for the AES-XTS algorithm

 drivers/crypto/inside-secure/safexcel.c        |   1 +
 drivers/crypto/inside-secure/safexcel.h        |   2 +
 drivers/crypto/inside-secure/safexcel_cipher.c | 360 ++++++++++++++----------
 3 files changed, 212 insertions(+), 151 deletions(-)

--
1.8.3.1


* [PATCH 1/2] crypto: inside-secure - Move static cipher alg & mode settings to init
  2019-07-26 15:00 [PATCH 0/2] Add support for the AES-XTS algorithm Pascal van Leeuwen
@ 2019-07-26 15:00 ` Pascal van Leeuwen
  2019-07-26 15:00 ` [PATCH 2/2] crypto: inside-secure - Add support for the AES-XTS algorithm Pascal van Leeuwen
  2019-08-02  4:45 ` [PATCH 0/2] " Herbert Xu
  2 siblings, 0 replies; 5+ messages in thread
From: Pascal van Leeuwen @ 2019-07-26 15:00 UTC (permalink / raw)
  To: linux-crypto; +Cc: antoine.tenart, herbert, davem, Pascal van Leeuwen

ctx->alg and ctx->mode used to be set on every request by the various
safexcel_encrypt and _decrypt routines (via safexcel_queue_req), which
makes little sense as these values are static per ciphersuite. Set them
once in dedicated _cra_init routines instead, in preparation for adding
more ciphersuites.
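
In short, the resulting pattern looks roughly as follows (condensed from
the diff below, not standalone code): the per-mode encrypt/decrypt
wrappers collapse into one generic pair, and a per-ciphersuite _cra_init
records the static settings once.

static int safexcel_encrypt(struct skcipher_request *req)
{
	/* Direction is the only per-request parameter left */
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

/* One of these per ciphersuite, e.g. for ecb(aes): */
static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	return 0;
}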

Signed-off-by: Pascal van Leeuwen <pvanleeuwen@verimatrix.com>
---
 drivers/crypto/inside-secure/safexcel_cipher.c | 258 +++++++++++--------------
 1 file changed, 111 insertions(+), 147 deletions(-)

diff --git a/drivers/crypto/inside-secure/safexcel_cipher.c b/drivers/crypto/inside-secure/safexcel_cipher.c
index 80c7e5c..45b83a3 100644
--- a/drivers/crypto/inside-secure/safexcel_cipher.c
+++ b/drivers/crypto/inside-secure/safexcel_cipher.c
@@ -916,8 +916,7 @@ static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)

 static int safexcel_queue_req(struct crypto_async_request *base,
 			struct safexcel_cipher_req *sreq,
-			enum safexcel_cipher_direction dir, u32 mode,
-			enum safexcel_cipher_alg alg)
+			enum safexcel_cipher_direction dir)
 {
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
 	struct safexcel_crypto_priv *priv = ctx->priv;
@@ -925,8 +924,6 @@ static int safexcel_queue_req(struct crypto_async_request *base,

 	sreq->needs_inv = false;
 	sreq->direction = dir;
-	ctx->alg = alg;
-	ctx->mode = mode;

 	if (ctx->base.ctxr) {
 		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
@@ -954,18 +951,16 @@ static int safexcel_queue_req(struct crypto_async_request *base,
 	return ret;
 }

-static int safexcel_ecb_aes_encrypt(struct skcipher_request *req)
+static int safexcel_encrypt(struct skcipher_request *req)
 {
 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
-			SAFEXCEL_AES);
+			SAFEXCEL_ENCRYPT);
 }

-static int safexcel_ecb_aes_decrypt(struct skcipher_request *req)
+static int safexcel_decrypt(struct skcipher_request *req)
 {
 	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
-			SAFEXCEL_AES);
+			SAFEXCEL_DECRYPT);
 }

 static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
@@ -1039,12 +1034,22 @@ static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
 	}
 }

+static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
+{
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	safexcel_skcipher_cra_init(tfm);
+	ctx->alg  = SAFEXCEL_AES;
+	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
+	return 0;
+}
+
 struct safexcel_alg_template safexcel_alg_ecb_aes = {
 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
 	.alg.skcipher = {
 		.setkey = safexcel_skcipher_aes_setkey,
-		.encrypt = safexcel_ecb_aes_encrypt,
-		.decrypt = safexcel_ecb_aes_decrypt,
+		.encrypt = safexcel_encrypt,
+		.decrypt = safexcel_decrypt,
 		.min_keysize = AES_MIN_KEY_SIZE,
 		.max_keysize = AES_MAX_KEY_SIZE,
 		.base = {
@@ -1056,33 +1061,29 @@ struct safexcel_alg_template safexcel_alg_ecb_aes = {
 			.cra_blocksize = AES_BLOCK_SIZE,
 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
 			.cra_alignmask = 0,
-			.cra_init = safexcel_skcipher_cra_init,
+			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
 			.cra_exit = safexcel_skcipher_cra_exit,
 			.cra_module = THIS_MODULE,
 		},
 	},
 };

-static int safexcel_cbc_aes_encrypt(struct skcipher_request *req)
+static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
 {
-	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
-			SAFEXCEL_AES);
-}
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

-static int safexcel_cbc_aes_decrypt(struct skcipher_request *req)
-{
-	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
-			SAFEXCEL_AES);
+	safexcel_skcipher_cra_init(tfm);
+	ctx->alg  = SAFEXCEL_AES;
+	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
+	return 0;
 }

 struct safexcel_alg_template safexcel_alg_cbc_aes = {
 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
 	.alg.skcipher = {
 		.setkey = safexcel_skcipher_aes_setkey,
-		.encrypt = safexcel_cbc_aes_encrypt,
-		.decrypt = safexcel_cbc_aes_decrypt,
+		.encrypt = safexcel_encrypt,
+		.decrypt = safexcel_decrypt,
 		.min_keysize = AES_MIN_KEY_SIZE,
 		.max_keysize = AES_MAX_KEY_SIZE,
 		.ivsize = AES_BLOCK_SIZE,
@@ -1095,27 +1096,13 @@ struct safexcel_alg_template safexcel_alg_cbc_aes = {
 			.cra_blocksize = AES_BLOCK_SIZE,
 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
 			.cra_alignmask = 0,
-			.cra_init = safexcel_skcipher_cra_init,
+			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
 			.cra_exit = safexcel_skcipher_cra_exit,
 			.cra_module = THIS_MODULE,
 		},
 	},
 };

-static int safexcel_ctr_aes_encrypt(struct skcipher_request *req)
-{
-	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD,
-			SAFEXCEL_AES);
-}
-
-static int safexcel_ctr_aes_decrypt(struct skcipher_request *req)
-{
-	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD,
-			SAFEXCEL_AES);
-}
-
 static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
 					   const u8 *key, unsigned int len)
 {
@@ -1154,12 +1141,22 @@ static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
 	return 0;
 }

+static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
+{
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	safexcel_skcipher_cra_init(tfm);
+	ctx->alg  = SAFEXCEL_AES;
+	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
+	return 0;
+}
+
 struct safexcel_alg_template safexcel_alg_ctr_aes = {
 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
 	.alg.skcipher = {
 		.setkey = safexcel_skcipher_aesctr_setkey,
-		.encrypt = safexcel_ctr_aes_encrypt,
-		.decrypt = safexcel_ctr_aes_decrypt,
+		.encrypt = safexcel_encrypt,
+		.decrypt = safexcel_decrypt,
 		/* Add 4 to include the 4 byte nonce! */
 		.min_keysize = AES_MIN_KEY_SIZE + 4,
 		.max_keysize = AES_MAX_KEY_SIZE + 4,
@@ -1173,27 +1170,13 @@ struct safexcel_alg_template safexcel_alg_ctr_aes = {
 			.cra_blocksize = 1,
 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
 			.cra_alignmask = 0,
-			.cra_init = safexcel_skcipher_cra_init,
+			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
 			.cra_exit = safexcel_skcipher_cra_exit,
 			.cra_module = THIS_MODULE,
 		},
 	},
 };

-static int safexcel_cbc_des_encrypt(struct skcipher_request *req)
-{
-	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
-			SAFEXCEL_DES);
-}
-
-static int safexcel_cbc_des_decrypt(struct skcipher_request *req)
-{
-	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
-			SAFEXCEL_DES);
-}
-
 static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
 			       unsigned int len)
 {
@@ -1224,12 +1207,22 @@ static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
 	return 0;
 }

+static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
+{
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	safexcel_skcipher_cra_init(tfm);
+	ctx->alg  = SAFEXCEL_DES;
+	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
+	return 0;
+}
+
 struct safexcel_alg_template safexcel_alg_cbc_des = {
 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
 	.alg.skcipher = {
 		.setkey = safexcel_des_setkey,
-		.encrypt = safexcel_cbc_des_encrypt,
-		.decrypt = safexcel_cbc_des_decrypt,
+		.encrypt = safexcel_encrypt,
+		.decrypt = safexcel_decrypt,
 		.min_keysize = DES_KEY_SIZE,
 		.max_keysize = DES_KEY_SIZE,
 		.ivsize = DES_BLOCK_SIZE,
@@ -1242,33 +1235,29 @@ struct safexcel_alg_template safexcel_alg_cbc_des = {
 			.cra_blocksize = DES_BLOCK_SIZE,
 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
 			.cra_alignmask = 0,
-			.cra_init = safexcel_skcipher_cra_init,
+			.cra_init = safexcel_skcipher_des_cbc_cra_init,
 			.cra_exit = safexcel_skcipher_cra_exit,
 			.cra_module = THIS_MODULE,
 		},
 	},
 };

-static int safexcel_ecb_des_encrypt(struct skcipher_request *req)
+static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
 {
-	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
-			SAFEXCEL_DES);
-}
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

-static int safexcel_ecb_des_decrypt(struct skcipher_request *req)
-{
-	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
-			SAFEXCEL_DES);
+	safexcel_skcipher_cra_init(tfm);
+	ctx->alg  = SAFEXCEL_DES;
+	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
+	return 0;
 }

 struct safexcel_alg_template safexcel_alg_ecb_des = {
 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
 	.alg.skcipher = {
 		.setkey = safexcel_des_setkey,
-		.encrypt = safexcel_ecb_des_encrypt,
-		.decrypt = safexcel_ecb_des_decrypt,
+		.encrypt = safexcel_encrypt,
+		.decrypt = safexcel_decrypt,
 		.min_keysize = DES_KEY_SIZE,
 		.max_keysize = DES_KEY_SIZE,
 		.base = {
@@ -1280,27 +1269,13 @@ struct safexcel_alg_template safexcel_alg_ecb_des = {
 			.cra_blocksize = DES_BLOCK_SIZE,
 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
 			.cra_alignmask = 0,
-			.cra_init = safexcel_skcipher_cra_init,
+			.cra_init = safexcel_skcipher_des_ecb_cra_init,
 			.cra_exit = safexcel_skcipher_cra_exit,
 			.cra_module = THIS_MODULE,
 		},
 	},
 };

-static int safexcel_cbc_des3_ede_encrypt(struct skcipher_request *req)
-{
-	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
-			SAFEXCEL_3DES);
-}
-
-static int safexcel_cbc_des3_ede_decrypt(struct skcipher_request *req)
-{
-	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
-			SAFEXCEL_3DES);
-}
-
 static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
 				   const u8 *key, unsigned int len)
 {
@@ -1324,12 +1299,22 @@ static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
 	return 0;
 }

+static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
+{
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	safexcel_skcipher_cra_init(tfm);
+	ctx->alg  = SAFEXCEL_3DES;
+	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
+	return 0;
+}
+
 struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
 	.alg.skcipher = {
 		.setkey = safexcel_des3_ede_setkey,
-		.encrypt = safexcel_cbc_des3_ede_encrypt,
-		.decrypt = safexcel_cbc_des3_ede_decrypt,
+		.encrypt = safexcel_encrypt,
+		.decrypt = safexcel_decrypt,
 		.min_keysize = DES3_EDE_KEY_SIZE,
 		.max_keysize = DES3_EDE_KEY_SIZE,
 		.ivsize = DES3_EDE_BLOCK_SIZE,
@@ -1342,33 +1327,29 @@ struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
 			.cra_alignmask = 0,
-			.cra_init = safexcel_skcipher_cra_init,
+			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
 			.cra_exit = safexcel_skcipher_cra_exit,
 			.cra_module = THIS_MODULE,
 		},
 	},
 };

-static int safexcel_ecb_des3_ede_encrypt(struct skcipher_request *req)
+static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
 {
-	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
-			SAFEXCEL_3DES);
-}
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

-static int safexcel_ecb_des3_ede_decrypt(struct skcipher_request *req)
-{
-	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
-			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
-			SAFEXCEL_3DES);
+	safexcel_skcipher_cra_init(tfm);
+	ctx->alg  = SAFEXCEL_3DES;
+	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
+	return 0;
 }

 struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
 	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
 	.alg.skcipher = {
 		.setkey = safexcel_des3_ede_setkey,
-		.encrypt = safexcel_ecb_des3_ede_encrypt,
-		.decrypt = safexcel_ecb_des3_ede_decrypt,
+		.encrypt = safexcel_encrypt,
+		.decrypt = safexcel_decrypt,
 		.min_keysize = DES3_EDE_KEY_SIZE,
 		.max_keysize = DES3_EDE_KEY_SIZE,
 		.base = {
@@ -1380,27 +1361,25 @@ struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
 			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
 			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
 			.cra_alignmask = 0,
-			.cra_init = safexcel_skcipher_cra_init,
+			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
 			.cra_exit = safexcel_skcipher_cra_exit,
 			.cra_module = THIS_MODULE,
 		},
 	},
 };

-static int safexcel_aead_encrypt_aes(struct aead_request *req)
+static int safexcel_aead_encrypt(struct aead_request *req)
 {
 	struct safexcel_cipher_req *creq = aead_request_ctx(req);

-	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
-			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
+	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
 }

-static int safexcel_aead_decrypt_aes(struct aead_request *req)
+static int safexcel_aead_decrypt(struct aead_request *req)
 {
 	struct safexcel_cipher_req *creq = aead_request_ctx(req);

-	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
-			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
+	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
 }

 static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
@@ -1416,6 +1395,7 @@ static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
 	ctx->priv = tmpl->priv;

 	ctx->alg  = SAFEXCEL_AES; /* default */
+	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
 	ctx->aead = true;
 	ctx->base.send = safexcel_aead_send;
 	ctx->base.handle_result = safexcel_aead_handle_result;
@@ -1436,8 +1416,8 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
 	.alg.aead = {
 		.setkey = safexcel_aead_setkey,
-		.encrypt = safexcel_aead_encrypt_aes,
-		.decrypt = safexcel_aead_decrypt_aes,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
 		.ivsize = AES_BLOCK_SIZE,
 		.maxauthsize = SHA1_DIGEST_SIZE,
 		.base = {
@@ -1470,8 +1450,8 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
 	.alg.aead = {
 		.setkey = safexcel_aead_setkey,
-		.encrypt = safexcel_aead_encrypt_aes,
-		.decrypt = safexcel_aead_decrypt_aes,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
 		.ivsize = AES_BLOCK_SIZE,
 		.maxauthsize = SHA256_DIGEST_SIZE,
 		.base = {
@@ -1504,8 +1484,8 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
 	.alg.aead = {
 		.setkey = safexcel_aead_setkey,
-		.encrypt = safexcel_aead_encrypt_aes,
-		.decrypt = safexcel_aead_decrypt_aes,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
 		.ivsize = AES_BLOCK_SIZE,
 		.maxauthsize = SHA224_DIGEST_SIZE,
 		.base = {
@@ -1538,8 +1518,8 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
 	.alg.aead = {
 		.setkey = safexcel_aead_setkey,
-		.encrypt = safexcel_aead_encrypt_aes,
-		.decrypt = safexcel_aead_decrypt_aes,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
 		.ivsize = AES_BLOCK_SIZE,
 		.maxauthsize = SHA512_DIGEST_SIZE,
 		.base = {
@@ -1572,8 +1552,8 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
 	.alg.aead = {
 		.setkey = safexcel_aead_setkey,
-		.encrypt = safexcel_aead_encrypt_aes,
-		.decrypt = safexcel_aead_decrypt_aes,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
 		.ivsize = AES_BLOCK_SIZE,
 		.maxauthsize = SHA384_DIGEST_SIZE,
 		.base = {
@@ -1601,28 +1581,12 @@ static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
 	return 0;
 }

-static int safexcel_aead_encrypt_3des(struct aead_request *req)
-{
-	struct safexcel_cipher_req *creq = aead_request_ctx(req);
-
-	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
-			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_3DES);
-}
-
-static int safexcel_aead_decrypt_3des(struct aead_request *req)
-{
-	struct safexcel_cipher_req *creq = aead_request_ctx(req);
-
-	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
-			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_3DES);
-}
-
 struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
 	.alg.aead = {
 		.setkey = safexcel_aead_setkey,
-		.encrypt = safexcel_aead_encrypt_3des,
-		.decrypt = safexcel_aead_decrypt_3des,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
 		.ivsize = DES3_EDE_BLOCK_SIZE,
 		.maxauthsize = SHA1_DIGEST_SIZE,
 		.base = {
@@ -1654,8 +1618,8 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
 	.alg.aead = {
 		.setkey = safexcel_aead_setkey,
-		.encrypt = safexcel_aead_encrypt_aes,
-		.decrypt = safexcel_aead_decrypt_aes,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
 		.ivsize = 8,
 		.maxauthsize = SHA1_DIGEST_SIZE,
 		.base = {
@@ -1687,8 +1651,8 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
 	.alg.aead = {
 		.setkey = safexcel_aead_setkey,
-		.encrypt = safexcel_aead_encrypt_aes,
-		.decrypt = safexcel_aead_decrypt_aes,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
 		.ivsize = 8,
 		.maxauthsize = SHA256_DIGEST_SIZE,
 		.base = {
@@ -1720,8 +1684,8 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
 	.alg.aead = {
 		.setkey = safexcel_aead_setkey,
-		.encrypt = safexcel_aead_encrypt_aes,
-		.decrypt = safexcel_aead_decrypt_aes,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
 		.ivsize = 8,
 		.maxauthsize = SHA224_DIGEST_SIZE,
 		.base = {
@@ -1753,8 +1717,8 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
 	.alg.aead = {
 		.setkey = safexcel_aead_setkey,
-		.encrypt = safexcel_aead_encrypt_aes,
-		.decrypt = safexcel_aead_decrypt_aes,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
 		.ivsize = 8,
 		.maxauthsize = SHA512_DIGEST_SIZE,
 		.base = {
@@ -1786,8 +1750,8 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
 	.type = SAFEXCEL_ALG_TYPE_AEAD,
 	.alg.aead = {
 		.setkey = safexcel_aead_setkey,
-		.encrypt = safexcel_aead_encrypt_aes,
-		.decrypt = safexcel_aead_decrypt_aes,
+		.encrypt = safexcel_aead_encrypt,
+		.decrypt = safexcel_aead_decrypt,
 		.ivsize = 8,
 		.maxauthsize = SHA384_DIGEST_SIZE,
 		.base = {
--
1.8.3.1


* [PATCH 2/2] crypto: inside-secure - Add support for the AES-XTS algorithm
  2019-07-26 15:00 [PATCH 0/2] Add support for the AES-XTS algorithm Pascal van Leeuwen
  2019-07-26 15:00 ` [PATCH 1/2] crypto: inside-secure - Move static cipher alg & mode settings to init Pascal van Leeuwen
@ 2019-07-26 15:00 ` Pascal van Leeuwen
  2019-08-02  4:45 ` [PATCH 0/2] " Herbert Xu
  2 siblings, 0 replies; 5+ messages in thread
From: Pascal van Leeuwen @ 2019-07-26 15:00 UTC (permalink / raw)
  To: linux-crypto; +Cc: antoine.tenart, herbert, davem, Pascal van Leeuwen

This patch adds support for the AES-XTS skcipher algorithm.
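
For illustration, a hypothetical in-kernel consumer (not part of this
patch) could reach the new implementation through the regular skcipher
API roughly as sketched below. Per XTS convention the key passed to
setkey is the cipher key concatenated with the tweak key, which is why
the algorithm template in the diff doubles min_keysize/max_keysize.

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

/*
 * Hypothetical sketch: encrypt one 512-byte sector in place with
 * AES-256-XTS. key is 64 bytes (cipher key + tweak key), iv is the
 * 16-byte tweak (e.g. the sector number), buf must be DMA-able
 * (e.g. kmalloc'd) since the request may be handled by hardware.
 */
static int xts_aes_demo(const u8 *key, u8 *buf, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* 64 bytes = AES-256 cipher key followed by AES-256 tweak key */
	ret = crypto_skcipher_setkey(tfm, key, 64);
	if (ret)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, 512);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, 512, iv);

	/* The driver is async, so wait for completion */
	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return ret;
}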

Signed-off-by: Pascal van Leeuwen <pvanleeuwen@verimatrix.com>
---
 drivers/crypto/inside-secure/safexcel.c        |   1 +
 drivers/crypto/inside-secure/safexcel.h        |   2 +
 drivers/crypto/inside-secure/safexcel_cipher.c | 102 ++++++++++++++++++++++++-
 3 files changed, 101 insertions(+), 4 deletions(-)

diff --git a/drivers/crypto/inside-secure/safexcel.c b/drivers/crypto/inside-secure/safexcel.c
index ca84119..45443bf 100644
--- a/drivers/crypto/inside-secure/safexcel.c
+++ b/drivers/crypto/inside-secure/safexcel.c
@@ -1004,6 +1004,7 @@ static int safexcel_request_ring_irq(void *pdev, int irqid,
 	&safexcel_alg_authenc_hmac_sha256_ctr_aes,
 	&safexcel_alg_authenc_hmac_sha384_ctr_aes,
 	&safexcel_alg_authenc_hmac_sha512_ctr_aes,
+	&safexcel_alg_xts_aes,
 };

 static int safexcel_register_algorithms(struct safexcel_crypto_priv *priv)
diff --git a/drivers/crypto/inside-secure/safexcel.h b/drivers/crypto/inside-secure/safexcel.h
index 6687ff9..dcc060c 100644
--- a/drivers/crypto/inside-secure/safexcel.h
+++ b/drivers/crypto/inside-secure/safexcel.h
@@ -329,6 +329,7 @@ struct safexcel_context_record {
 #define CONTEXT_CONTROL_CRYPTO_MODE_ECB		(0 << 0)
 #define CONTEXT_CONTROL_CRYPTO_MODE_CBC		(1 << 0)
 #define CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD	(6 << 0)
+#define CONTEXT_CONTROL_CRYPTO_MODE_XTS		(7 << 0)
 #define CONTEXT_CONTROL_IV0			BIT(5)
 #define CONTEXT_CONTROL_IV1			BIT(6)
 #define CONTEXT_CONTROL_IV2			BIT(7)
@@ -744,5 +745,6 @@ int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
 extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes;
 extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes;
 extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes;
+extern struct safexcel_alg_template safexcel_alg_xts_aes;

 #endif
diff --git a/drivers/crypto/inside-secure/safexcel_cipher.c b/drivers/crypto/inside-secure/safexcel_cipher.c
index 45b83a3..d65e5f7 100644
--- a/drivers/crypto/inside-secure/safexcel_cipher.c
+++ b/drivers/crypto/inside-secure/safexcel_cipher.c
@@ -39,9 +39,9 @@ struct safexcel_cipher_ctx {
 	enum safexcel_cipher_alg alg;
 	bool aead;

-	__le32 key[8];
+	__le32 key[16];
 	u32 nonce;
-	unsigned int key_len;
+	unsigned int key_len, xts;

 	/* All the below is AEAD specific */
 	u32 hash_alg;
@@ -368,7 +368,7 @@ static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
 	} else if (ctx->alg == SAFEXCEL_3DES) {
 		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_3DES;
 	} else if (ctx->alg == SAFEXCEL_AES) {
-		switch (ctx->key_len) {
+		switch (ctx->key_len >> ctx->xts) {
 		case AES_KEYSIZE_128:
 			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
 			break;
@@ -380,7 +380,7 @@ static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
 			break;
 		default:
 			dev_err(priv->dev, "aes keysize not supported: %u\n",
-				ctx->key_len);
+				ctx->key_len >> ctx->xts);
 			return -EINVAL;
 		}
 	}
@@ -1769,3 +1769,97 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
 		},
 	},
 };
+
+static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
+					   const u8 *key, unsigned int len)
+{
+	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct safexcel_crypto_priv *priv = ctx->priv;
+	struct crypto_aes_ctx aes;
+	int ret, i;
+	unsigned int keylen;
+
+	/* Only half of the key data is cipher key */
+	keylen = (len >> 1) + (len & 1);
+	ret = crypto_aes_expand_key(&aes, key, keylen);
+	if (ret) {
+		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		return ret;
+	}
+
+	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
+		for (i = 0; i < keylen / sizeof(u32); i++) {
+			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+				ctx->base.needs_inv = true;
+				break;
+			}
+		}
+	}
+
+	for (i = 0; i < keylen / sizeof(u32); i++)
+		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
+
+	/* The other half is the tweak key */
+	ret = crypto_aes_expand_key(&aes, (u8 *)(key + keylen), keylen);
+	if (ret) {
+		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+		return ret;
+	}
+
+	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
+		for (i = 0; i < keylen / sizeof(u32); i++) {
+			if (ctx->key[i + keylen / sizeof(u32)] !=
+			    cpu_to_le32(aes.key_enc[i])) {
+				ctx->base.needs_inv = true;
+				break;
+			}
+		}
+	}
+
+	for (i = 0; i < keylen / sizeof(u32); i++)
+		ctx->key[i + keylen / sizeof(u32)] =
+			cpu_to_le32(aes.key_enc[i]);
+
+	ctx->key_len = keylen << 1;
+
+	memzero_explicit(&aes, sizeof(aes));
+	return 0;
+}
+
+static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
+{
+	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	safexcel_skcipher_cra_init(tfm);
+	ctx->alg  = SAFEXCEL_AES;
+	ctx->xts  = 1;
+	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
+	return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_xts_aes = {
+	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
+	.alg.skcipher = {
+		.setkey = safexcel_skcipher_aesxts_setkey,
+		.encrypt = safexcel_encrypt,
+		.decrypt = safexcel_decrypt,
+		/* XTS key = cipher key + tweak key, hence the * 2! */
+		.min_keysize = AES_MIN_KEY_SIZE * 2,
+		.max_keysize = AES_MAX_KEY_SIZE * 2,
+		.ivsize = 16,
+		.base = {
+			.cra_name = "xts(aes)",
+			.cra_driver_name = "safexcel-xts-aes",
+			.cra_priority = 300,
+			.cra_flags = CRYPTO_ALG_ASYNC |
+				     CRYPTO_ALG_KERN_DRIVER_ONLY,
+			.cra_blocksize = 1,
+			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+			.cra_alignmask = 0,
+			.cra_init = safexcel_skcipher_aes_xts_cra_init,
+			.cra_exit = safexcel_skcipher_cra_exit,
+			.cra_module = THIS_MODULE,
+		},
+	},
+};
--
1.8.3.1


* Re: [PATCH 0/2] Add support for the AES-XTS algorithm
  2019-07-26 15:00 [PATCH 0/2] Add support for the AES-XTS algorithm Pascal van Leeuwen
  2019-07-26 15:00 ` [PATCH 1/2] crypto: inside-secure - Move static cipher alg & mode settings to init Pascal van Leeuwen
  2019-07-26 15:00 ` [PATCH 2/2] crypto: inside-secure - Add support for the AES-XTS algorithm Pascal van Leeuwen
@ 2019-08-02  4:45 ` Herbert Xu
  2019-08-02  9:04   ` Pascal Van Leeuwen
  2 siblings, 1 reply; 5+ messages in thread
From: Herbert Xu @ 2019-08-02  4:45 UTC (permalink / raw)
  To: Pascal van Leeuwen
  Cc: linux-crypto, antoine.tenart, davem, Pascal van Leeuwen

On Fri, Jul 26, 2019 at 05:00:31PM +0200, Pascal van Leeuwen wrote:
> This patch set adds support for the AES-XTS skcipher algorithm.
> 
>   Pascal van Leeuwen (2):
>   crypto: inside-secure - Move static cipher alg & mode settings to init
>   crypto: inside-secure - Add support for the AES-XTS algorithm
> 
>  drivers/crypto/inside-secure/safexcel.c        |   1 +
>  drivers/crypto/inside-secure/safexcel.h        |   2 +
>  drivers/crypto/inside-secure/safexcel_cipher.c | 360 ++++++++++++++----------
>  3 files changed, 212 insertions(+), 151 deletions(-)

This patch series doesn't apply against cryptodev.  Please resubmit.

Thanks,
-- 
Email: Herbert Xu <herbert@gondor.apana.org.au>
Home Page: http://gondor.apana.org.au/~herbert/
PGP Key: http://gondor.apana.org.au/~herbert/pubkey.txt


* RE: [PATCH 0/2] Add support for the AES-XTS algorithm
  2019-08-02  4:45 ` [PATCH 0/2] " Herbert Xu
@ 2019-08-02  9:04   ` Pascal Van Leeuwen
  0 siblings, 0 replies; 5+ messages in thread
From: Pascal Van Leeuwen @ 2019-08-02  9:04 UTC (permalink / raw)
  To: Herbert Xu, Pascal van Leeuwen; +Cc: linux-crypto, antoine.tenart, davem

> -----Original Message-----
> From: linux-crypto-owner@vger.kernel.org <linux-crypto-owner@vger.kernel.org> On Behalf Of
> Herbert Xu
> Sent: Friday, August 2, 2019 6:45 AM
> To: Pascal van Leeuwen <pascalvanl@gmail.com>
> Cc: linux-crypto@vger.kernel.org; antoine.tenart@bootlin.com; davem@davemloft.net; Pascal
> Van Leeuwen <pvanleeuwen@verimatrix.com>
> Subject: Re: [PATCH 0/2] Add support for the AES-XTS algorithm
> 
> On Fri, Jul 26, 2019 at 05:00:31PM +0200, Pascal van Leeuwen wrote:
> > This patch set adds support for the AES-XTS skcipher algorithm.
> >
> >   Pascal van Leeuwen (2):
> >   crypto: inside-secure - Move static cipher alg & mode settings to init
> >   crypto: inside-secure - Add support for the AES-XTS algorithm
> >
> >  drivers/crypto/inside-secure/safexcel.c        |   1 +
> >  drivers/crypto/inside-secure/safexcel.h        |   2 +
> >  drivers/crypto/inside-secure/safexcel_cipher.c | 360 ++++++++++++++----------
> >  3 files changed, 212 insertions(+), 151 deletions(-)
> 
> This patch series doesn't apply against cryptodev.  Please resubmit.
> 
This may well depend on my very first patch set, which has not been accepted yet.
Sorry about that; I will resubmit as soon as that one gets merged.

Regards,
Pascal van Leeuwen
Silicon IP Architect, Multi-Protocol Engines @ Verimatrix
www.insidesecure.com

> Thanks,
> --
> Email: Herbert Xu <herbert@gondor.apana.org.au>
> Home Page: http://gondor.apana.org.au/~herbert/
> PGP Key: http://gondor.apana.org.au/~herbert/pubkey.txt

