From: "Herbert Xu" <herbert@gondor.apana.org.au>
To: Catalin Marinas <catalin.marinas@arm.com>,
	Ard Biesheuvel <ardb@kernel.org>, Will Deacon <will@kernel.org>,
	Marc Zyngier <maz@kernel.org>, Arnd Bergmann <arnd@arndb.de>,
	Greg Kroah-Hartman <gregkh@linuxfoundation.org>,
	Andrew Morton <akpm@linux-foundation.org>,
	Linus Torvalds <torvalds@linux-foundation.org>,
	Linux Memory Management List <linux-mm@kvack.org>,
	Linux ARM <linux-arm-kernel@lists.infradead.org>,
	Linux Kernel Mailing List <linux-kernel@vger.kernel.org>,
	"David S. Miller" <davem@davemloft.net>,
	Linux Crypto Mailing List <linux-crypto@vger.kernel.org>
Subject: [RFC PATCH 7/7] crypto: caam - Explicitly request DMA alignment
Date: Tue, 10 May 2022 19:07:22 +0800
Message-ID: <E1noNi6-00BzW8-4S@fornost.hmeau.com>
In-Reply-To: <YnpGnsr4k7yVUR54@gondor.apana.org.au>

This patch uses API helpers to explicitly request DMA alignment when
allocating memory through the Crypto API.  Previously it was implicitly
assumed that all kmalloc memory is aligned for DMA.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
---
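
For reference, the _dma context accessors used throughout the diff below
come from the earlier patches in this series (2/7 through 5/7).  Roughly,
they return the usual context pointer rounded up to the DMA alignment
requested via cra_alignmask, along the lines of the illustrative sketch
below (see those patches for the authoritative definitions):

/*
 * Illustrative sketch only: return the tfm context rounded up to the
 * DMA alignment, instead of the plain crypto_tfm_ctx() pointer.
 */
static inline void *crypto_tfm_ctx_dma(struct crypto_tfm *tfm)
{
	unsigned int align = 1;

#ifdef ARCH_DMA_MINALIGN
	align = ARCH_DMA_MINALIGN;
#endif

	return PTR_ALIGN(crypto_tfm_ctx(tfm), align);
}

static inline void *crypto_aead_ctx_dma(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx_dma(crypto_aead_tfm(tfm));
}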

 drivers/crypto/caam/caamalg.c |  101 ++++++++++++++++++++++--------------------
 1 file changed, 55 insertions(+), 46 deletions(-)

diff --git a/drivers/crypto/caam/caamalg.c b/drivers/crypto/caam/caamalg.c
index d3d8bb0a69900..c12c678dcb0fd 100644
--- a/drivers/crypto/caam/caamalg.c
+++ b/drivers/crypto/caam/caamalg.c
@@ -131,7 +131,7 @@ struct caam_aead_req_ctx {
 
 static int aead_null_set_sh_desc(struct crypto_aead *aead)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 	u32 *desc;
@@ -184,7 +184,7 @@ static int aead_set_sh_desc(struct crypto_aead *aead)
 	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
 						 struct caam_aead_alg, aead);
 	unsigned int ivsize = crypto_aead_ivsize(aead);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 	u32 ctx1_iv_off = 0;
@@ -312,7 +312,7 @@ static int aead_set_sh_desc(struct crypto_aead *aead)
 static int aead_setauthsize(struct crypto_aead *authenc,
 				    unsigned int authsize)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
 
 	ctx->authsize = authsize;
 	aead_set_sh_desc(authenc);
@@ -322,7 +322,7 @@ static int aead_setauthsize(struct crypto_aead *authenc,
 
 static int gcm_set_sh_desc(struct crypto_aead *aead)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	unsigned int ivsize = crypto_aead_ivsize(aead);
 	u32 *desc;
@@ -372,7 +372,7 @@ static int gcm_set_sh_desc(struct crypto_aead *aead)
 
 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
 	int err;
 
 	err = crypto_gcm_check_authsize(authsize);
@@ -387,7 +387,7 @@ static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
 
 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	unsigned int ivsize = crypto_aead_ivsize(aead);
 	u32 *desc;
@@ -440,7 +440,7 @@ static int rfc4106_set_sh_desc(struct crypto_aead *aead)
 static int rfc4106_setauthsize(struct crypto_aead *authenc,
 			       unsigned int authsize)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
 	int err;
 
 	err = crypto_rfc4106_check_authsize(authsize);
@@ -455,7 +455,7 @@ static int rfc4106_setauthsize(struct crypto_aead *authenc,
 
 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	unsigned int ivsize = crypto_aead_ivsize(aead);
 	u32 *desc;
@@ -508,7 +508,7 @@ static int rfc4543_set_sh_desc(struct crypto_aead *aead)
 static int rfc4543_setauthsize(struct crypto_aead *authenc,
 			       unsigned int authsize)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
 
 	if (authsize != 16)
 		return -EINVAL;
@@ -521,7 +521,7 @@ static int rfc4543_setauthsize(struct crypto_aead *authenc,
 
 static int chachapoly_set_sh_desc(struct crypto_aead *aead)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	unsigned int ivsize = crypto_aead_ivsize(aead);
 	u32 *desc;
@@ -547,7 +547,7 @@ static int chachapoly_set_sh_desc(struct crypto_aead *aead)
 static int chachapoly_setauthsize(struct crypto_aead *aead,
 				  unsigned int authsize)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 
 	if (authsize != POLY1305_DIGEST_SIZE)
 		return -EINVAL;
@@ -559,7 +559,7 @@ static int chachapoly_setauthsize(struct crypto_aead *aead,
 static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
 			     unsigned int keylen)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	unsigned int ivsize = crypto_aead_ivsize(aead);
 	unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;
 
@@ -575,7 +575,7 @@ static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
 static int aead_setkey(struct crypto_aead *aead,
 			       const u8 *key, unsigned int keylen)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 	struct crypto_authenc_keys keys;
@@ -656,7 +656,7 @@ static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
 static int gcm_setkey(struct crypto_aead *aead,
 		      const u8 *key, unsigned int keylen)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	int err;
 
@@ -677,7 +677,7 @@ static int gcm_setkey(struct crypto_aead *aead,
 static int rfc4106_setkey(struct crypto_aead *aead,
 			  const u8 *key, unsigned int keylen)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	int err;
 
@@ -703,7 +703,7 @@ static int rfc4106_setkey(struct crypto_aead *aead,
 static int rfc4543_setkey(struct crypto_aead *aead,
 			  const u8 *key, unsigned int keylen)
 {
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	int err;
 
@@ -729,7 +729,7 @@ static int rfc4543_setkey(struct crypto_aead *aead,
 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
 			   unsigned int keylen, const u32 ctx1_iv_off)
 {
-	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
 	struct caam_skcipher_alg *alg =
 		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
 			     skcipher);
@@ -832,7 +832,7 @@ static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
 			       unsigned int keylen)
 {
-	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
 	struct device *jrdev = ctx->jrdev;
 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 	u32 *desc;
@@ -970,7 +970,7 @@ static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err,
 			    void *context)
 {
 	struct aead_request *req = context;
-	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
+	struct caam_aead_req_ctx *rctx = aead_request_ctx_dma(req);
 	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
 	struct aead_edesc *edesc;
 	int ecode = 0;
@@ -1003,7 +1003,7 @@ static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
 {
 	struct skcipher_request *req = context;
 	struct skcipher_edesc *edesc;
-	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
+	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx_dma(req);
 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
 	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
 	int ivsize = crypto_skcipher_ivsize(skcipher);
@@ -1057,7 +1057,7 @@ static void init_aead_job(struct aead_request *req,
 			  bool all_contig, bool encrypt)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	int authsize = ctx->authsize;
 	u32 *desc = edesc->hw_desc;
 	u32 out_options, in_options;
@@ -1118,7 +1118,7 @@ static void init_gcm_job(struct aead_request *req,
 			 bool all_contig, bool encrypt)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	unsigned int ivsize = crypto_aead_ivsize(aead);
 	u32 *desc = edesc->hw_desc;
 	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
@@ -1185,7 +1185,7 @@ static void init_authenc_job(struct aead_request *req,
 	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
 						 struct caam_aead_alg, aead);
 	unsigned int ivsize = crypto_aead_ivsize(aead);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
 	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
 			       OP_ALG_AAI_CTR_MOD128);
@@ -1234,7 +1234,7 @@ static void init_skcipher_job(struct skcipher_request *req,
 			      const bool encrypt)
 {
 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
-	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
 	struct device *jrdev = ctx->jrdev;
 	int ivsize = crypto_skcipher_ivsize(skcipher);
 	u32 *desc = edesc->hw_desc;
@@ -1290,9 +1290,9 @@ static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
 					   bool encrypt)
 {
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
-	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
+	struct caam_aead_req_ctx *rctx = aead_request_ctx_dma(req);
 	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
 		       GFP_KERNEL : GFP_ATOMIC;
 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
@@ -1429,7 +1429,7 @@ static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
 static int aead_enqueue_req(struct device *jrdev, struct aead_request *req)
 {
 	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
-	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
+	struct caam_aead_req_ctx *rctx = aead_request_ctx_dma(req);
 	struct aead_edesc *edesc = rctx->edesc;
 	u32 *desc = edesc->hw_desc;
 	int ret;
@@ -1457,7 +1457,7 @@ static inline int chachapoly_crypt(struct aead_request *req, bool encrypt)
 {
 	struct aead_edesc *edesc;
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	bool all_contig;
 	u32 *desc;
@@ -1491,7 +1491,7 @@ static inline int aead_crypt(struct aead_request *req, bool encrypt)
 {
 	struct aead_edesc *edesc;
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	bool all_contig;
 
@@ -1524,8 +1524,8 @@ static int aead_decrypt(struct aead_request *req)
 static int aead_do_one_req(struct crypto_engine *engine, void *areq)
 {
 	struct aead_request *req = aead_request_cast(areq);
-	struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
-	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(crypto_aead_reqtfm(req));
+	struct caam_aead_req_ctx *rctx = aead_request_ctx_dma(req);
 	u32 *desc = rctx->edesc->hw_desc;
 	int ret;
 
@@ -1550,7 +1550,7 @@ static inline int gcm_crypt(struct aead_request *req, bool encrypt)
 {
 	struct aead_edesc *edesc;
 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
-	struct caam_ctx *ctx = crypto_aead_ctx(aead);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 	struct device *jrdev = ctx->jrdev;
 	bool all_contig;
 
@@ -1597,8 +1597,8 @@ static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
 						   int desc_bytes)
 {
 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
-	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
-	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
+	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx_dma(req);
 	struct device *jrdev = ctx->jrdev;
 	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
 		       GFP_KERNEL : GFP_ATOMIC;
@@ -1756,8 +1756,8 @@ static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
 static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
 {
 	struct skcipher_request *req = skcipher_request_cast(areq);
-	struct caam_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
-	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(crypto_skcipher_reqtfm(req));
+	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx_dma(req);
 	u32 *desc = rctx->edesc->hw_desc;
 	int ret;
 
@@ -1790,7 +1790,7 @@ static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
 {
 	struct skcipher_edesc *edesc;
 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
-	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
 	struct device *jrdev = ctx->jrdev;
 	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
@@ -1807,7 +1807,8 @@ static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
 
 	if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) ||
 			      ctx->xts_key_fallback)) {
-		struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
+		struct caam_skcipher_req_ctx *rctx =
+			skcipher_request_ctx_dma(req);
 
 		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
 		skcipher_request_set_callback(&rctx->fallback_req,
@@ -3397,7 +3398,7 @@ static int caam_cra_init(struct crypto_skcipher *tfm)
 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
 	struct caam_skcipher_alg *caam_alg =
 		container_of(alg, typeof(*caam_alg), skcipher);
-	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);
 	u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
 	int ret = 0;
 
@@ -3416,10 +3417,12 @@ static int caam_cra_init(struct crypto_skcipher *tfm)
 		}
 
 		ctx->fallback = fallback;
-		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx) +
-					    crypto_skcipher_reqsize(fallback));
+		crypto_skcipher_set_reqsize_dma(
+			tfm, sizeof(struct caam_skcipher_req_ctx) +
+			     crypto_skcipher_reqsize(fallback));
 	} else {
-		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
+		crypto_skcipher_set_reqsize_dma(
+			tfm, sizeof(struct caam_skcipher_req_ctx));
 	}
 
 	ret = caam_init_common(ctx, &caam_alg->caam, false);
@@ -3434,9 +3437,9 @@ static int caam_aead_init(struct crypto_aead *tfm)
 	struct aead_alg *alg = crypto_aead_alg(tfm);
 	struct caam_aead_alg *caam_alg =
 		 container_of(alg, struct caam_aead_alg, aead);
-	struct caam_ctx *ctx = crypto_aead_ctx(tfm);
+	struct caam_ctx *ctx = crypto_aead_ctx_dma(tfm);
 
-	crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));
+	crypto_aead_set_reqsize_dma(tfm, sizeof(struct caam_aead_req_ctx));
 
 	ctx->enginectx.op.do_one_request = aead_do_one_req;
 
@@ -3454,7 +3457,7 @@ static void caam_exit_common(struct caam_ctx *ctx)
 
 static void caam_cra_exit(struct crypto_skcipher *tfm)
 {
-	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
+	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);
 
 	if (ctx->fallback)
 		crypto_free_skcipher(ctx->fallback);
@@ -3463,7 +3466,7 @@ static void caam_cra_exit(struct crypto_skcipher *tfm)
 
 static void caam_aead_exit(struct crypto_aead *tfm)
 {
-	caam_exit_common(crypto_aead_ctx(tfm));
+	caam_exit_common(crypto_aead_ctx_dma(tfm));
 }
 
 void caam_algapi_exit(void)
@@ -3492,6 +3495,9 @@ static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
 	alg->base.cra_module = THIS_MODULE;
 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
+#ifdef ARCH_DMA_MINALIGN
+	alg->base.cra_alignmask = ARCH_DMA_MINALIGN - 1;
+#endif
 	alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
 			      CRYPTO_ALG_KERN_DRIVER_ONLY);
 
@@ -3506,6 +3512,9 @@ static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
 	alg->base.cra_module = THIS_MODULE;
 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
+#ifdef ARCH_DMA_MINALIGN
+	alg->base.cra_alignmask = ARCH_DMA_MINALIGN - 1;
+#endif
 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
 			      CRYPTO_ALG_KERN_DRIVER_ONLY;
 

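The request contexts follow the same pattern: with cra_alignmask set to
ARCH_DMA_MINALIGN - 1 as above, a driver reserves its per-request state
with the _dma reqsize helper and fetches it with the matching _dma
accessor.  A minimal sketch of that pairing, using placeholder names that
are not taken from caamalg.c:

/*
 * Minimal sketch of the init/request pairing used in this patch.
 * struct my_req_ctx and the callbacks are placeholder names; only the
 * *_dma helpers are real API from this series.
 */
struct my_req_ctx {
	u32 hw_desc[16];		/* descriptor the device reads via DMA */
};

static int my_aead_init(struct crypto_aead *tfm)
{
	/* reqsize is padded so the request ctx starts at a DMA-aligned offset */
	crypto_aead_set_reqsize_dma(tfm, sizeof(struct my_req_ctx));
	return 0;
}

static int my_aead_encrypt(struct aead_request *req)
{
	struct my_req_ctx *rctx = aead_request_ctx_dma(req);

	memset(rctx->hw_desc, 0, sizeof(rctx->hw_desc));
	/* ... build the descriptor and hand rctx->hw_desc to the engine ... */
	return 0;
}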