* [PATCH] locking/arch: Rewrite local_add_unless as static inline function
@ 2023-07-31  8:42 Uros Bizjak
  2023-08-07 16:13 ` Christophe Leroy
  2023-10-04 15:46 ` [tip: locking/core] locking/local, arch: Rewrite local_add_unless() as a " tip-bot2 for Uros Bizjak
  0 siblings, 2 replies; 4+ messages in thread
From: Uros Bizjak @ 2023-07-31  8:42 UTC (permalink / raw)
  To: loongarch, linux-mips, x86, linux-arch, linux-kernel
  Cc: Uros Bizjak, Will Deacon, Peter Zijlstra, Boqun Feng,
	Mark Rutland, Richard Henderson, Ivan Kokshaysky, Matt Turner,
	Huacai Chen, WANG Xuerui, Jiaxun Yang, Jun Yi,
	Thomas Bogendoerfer, Michael Ellerman, Nicholas Piggin,
	Christophe Leroy, Thomas Gleixner, Ingo Molnar, Borislav Petkov,
	Dave Hansen, H. Peter Anvin

Rewrite local_add_unless as a static inline function with boolean
return value, similar to arch_atomic_add_unless arch fallbacks.

The function is currently unused.
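A minimal, hypothetical usage sketch of the boolean-returning form (not part of
this patch: the per-CPU counter name and the limit value are invented for
illustration; as with all local_t operations, the caller must stay bound to the
CPU that owns the counter, e.g. with preemption disabled):

	#include <linux/percpu.h>
	#include <linux/preempt.h>
	#include <asm/local.h>

	static DEFINE_PER_CPU(local_t, sample_count);

	/* Count one sample unless the per-CPU counter already hit the limit. */
	static bool count_sample(void)
	{
		bool added;

		preempt_disable();
		added = local_add_unless(this_cpu_ptr(&sample_count), 1, 1024);
		preempt_enable();

		return added;
	}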

Cc: Will Deacon <will@kernel.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Mark Rutland <mark.rutland@arm.com>
Cc: Richard Henderson <richard.henderson@linaro.org>
Cc: Ivan Kokshaysky <ink@jurassic.park.msu.ru>
Cc: Matt Turner <mattst88@gmail.com>
Cc: Huacai Chen <chenhuacai@kernel.org>
Cc: WANG Xuerui <kernel@xen0n.name>
Cc: Jiaxun Yang <jiaxun.yang@flygoat.com>
Cc: Jun Yi <yijun@loongson.cn>
Cc: Thomas Bogendoerfer <tsbogend@alpha.franken.de>
Cc: Michael Ellerman <mpe@ellerman.id.au>
Cc: Nicholas Piggin <npiggin@gmail.com>
Cc: Christophe Leroy <christophe.leroy@csgroup.eu>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Ingo Molnar <mingo@redhat.com>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Dave Hansen <dave.hansen@linux.intel.com>
Cc: "H. Peter Anvin" <hpa@zytor.com>
Signed-off-by: Uros Bizjak <ubizjak@gmail.com>
---
 arch/alpha/include/asm/local.h     | 33 +++++++++++++++---------------
 arch/loongarch/include/asm/local.h | 27 ++++++++++++++----------
 arch/mips/include/asm/local.h      | 27 ++++++++++++++----------
 arch/powerpc/include/asm/local.h   | 12 +++++------
 arch/x86/include/asm/local.h       | 33 +++++++++++++++---------------
 5 files changed, 70 insertions(+), 62 deletions(-)

diff --git a/arch/alpha/include/asm/local.h b/arch/alpha/include/asm/local.h
index 0fcaad642cc3..88eb398947a5 100644
--- a/arch/alpha/include/asm/local.h
+++ b/arch/alpha/include/asm/local.h
@@ -65,28 +65,27 @@ static __inline__ bool local_try_cmpxchg(local_t *l, long *old, long new)
 #define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))
 
 /**
- * local_add_unless - add unless the number is a given value
+ * local_add_unless - add unless the number is already a given value
  * @l: pointer of type local_t
  * @a: the amount to add to l...
  * @u: ...unless l is equal to u.
  *
- * Atomically adds @a to @l, so long as it was not @u.
- * Returns non-zero if @l was not @u, and zero otherwise.
+ * Atomically adds @a to @l, if @v was not already @u.
+ * Returns true if the addition was done.
  */
-#define local_add_unless(l, a, u)				\
-({								\
-	long c, old;						\
-	c = local_read(l);					\
-	for (;;) {						\
-		if (unlikely(c == (u)))				\
-			break;					\
-		old = local_cmpxchg((l), c, c + (a));	\
-		if (likely(old == c))				\
-			break;					\
-		c = old;					\
-	}							\
-	c != (u);						\
-})
+static __inline__ bool
+local_add_unless(local_t *l, long a, long u)
+{
+	long c = local_read(l);
+
+	do {
+		if (unlikely(c == u))
+			return false;
+	} while (!local_try_cmpxchg(l, &c, c + a));
+
+	return true;
+}
+
 #define local_inc_not_zero(l) local_add_unless((l), 1, 0)
 
 #define local_add_negative(a, l) (local_add_return((a), (l)) < 0)
diff --git a/arch/loongarch/include/asm/local.h b/arch/loongarch/include/asm/local.h
index 83e995b30e47..15bc3579f16c 100644
--- a/arch/loongarch/include/asm/local.h
+++ b/arch/loongarch/include/asm/local.h
@@ -70,22 +70,27 @@ static inline bool local_try_cmpxchg(local_t *l, long *old, long new)
 #define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
 
 /**
- * local_add_unless - add unless the number is a given value
+ * local_add_unless - add unless the number is already a given value
  * @l: pointer of type local_t
  * @a: the amount to add to l...
  * @u: ...unless l is equal to u.
  *
- * Atomically adds @a to @l, so long as it was not @u.
- * Returns non-zero if @l was not @u, and zero otherwise.
+ * Atomically adds @a to @l, if @v was not already @u.
+ * Returns true if the addition was done.
  */
-#define local_add_unless(l, a, u)				\
-({								\
-	long c, old;						\
-	c = local_read(l);					\
-	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
+static inline bool
+local_add_unless(local_t *l, long a, long u)
+{
+	long c = local_read(l);
+
+	do {
+		if (unlikely(c == u))
+			return false;
+	} while (!local_try_cmpxchg(l, &c, c + a));
+
+	return true;
+}
+
 #define local_inc_not_zero(l) local_add_unless((l), 1, 0)
 
 #define local_dec_return(l) local_sub_return(1, (l))
diff --git a/arch/mips/include/asm/local.h b/arch/mips/include/asm/local.h
index 5daf6fe8e3e9..90435158a083 100644
--- a/arch/mips/include/asm/local.h
+++ b/arch/mips/include/asm/local.h
@@ -108,22 +108,27 @@ static __inline__ bool local_try_cmpxchg(local_t *l, long *old, long new)
 #define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
 
 /**
- * local_add_unless - add unless the number is a given value
+ * local_add_unless - add unless the number is already a given value
  * @l: pointer of type local_t
  * @a: the amount to add to l...
  * @u: ...unless l is equal to u.
  *
- * Atomically adds @a to @l, so long as it was not @u.
- * Returns non-zero if @l was not @u, and zero otherwise.
+ * Atomically adds @a to @l, if @v was not already @u.
+ * Returns true if the addition was done.
  */
-#define local_add_unless(l, a, u)				\
-({								\
-	long c, old;						\
-	c = local_read(l);					\
-	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
+static __inline__ bool
+local_add_unless(local_t *l, long a, long u)
+{
+	long c = local_read(l);
+
+	do {
+		if (unlikely(c == u))
+			return false;
+	} while (!local_try_cmpxchg(l, &c, c + a));
+
+	return true;
+}
+
 #define local_inc_not_zero(l) local_add_unless((l), 1, 0)
 
 #define local_dec_return(l) local_sub_return(1, (l))
diff --git a/arch/powerpc/include/asm/local.h b/arch/powerpc/include/asm/local.h
index 45492fb5bf22..ec6ced6d7ced 100644
--- a/arch/powerpc/include/asm/local.h
+++ b/arch/powerpc/include/asm/local.h
@@ -115,23 +115,23 @@ static __inline__ long local_xchg(local_t *l, long n)
 }
 
 /**
- * local_add_unless - add unless the number is a given value
+ * local_add_unless - add unless the number is already a given value
  * @l: pointer of type local_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
- * Atomically adds @a to @l, so long as it was not @u.
- * Returns non-zero if @l was not @u, and zero otherwise.
+ * Atomically adds @a to @l, if @v was not already @u.
+ * Returns true if the addition was done.
  */
-static __inline__ int local_add_unless(local_t *l, long a, long u)
+static __inline__ bool local_add_unless(local_t *l, long a, long u)
 {
 	unsigned long flags;
-	int ret = 0;
+	bool ret = false;
 
 	powerpc_local_irq_pmu_save(flags);
 	if (l->v != u) {
 		l->v += a;
-		ret = 1;
+		ret = true;
 	}
 	powerpc_local_irq_pmu_restore(flags);
 
diff --git a/arch/x86/include/asm/local.h b/arch/x86/include/asm/local.h
index 56d4ef604b91..46ce92d4e556 100644
--- a/arch/x86/include/asm/local.h
+++ b/arch/x86/include/asm/local.h
@@ -135,28 +135,27 @@ static inline bool local_try_cmpxchg(local_t *l, long *old, long new)
 #define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))
 
 /**
- * local_add_unless - add unless the number is a given value
+ * local_add_unless - add unless the number is already a given value
  * @l: pointer of type local_t
  * @a: the amount to add to l...
  * @u: ...unless l is equal to u.
  *
- * Atomically adds @a to @l, so long as it was not @u.
- * Returns non-zero if @l was not @u, and zero otherwise.
+ * Atomically adds @a to @l, if @v was not already @u.
+ * Returns true if the addition was done.
  */
-#define local_add_unless(l, a, u)				\
-({								\
-	long c, old;						\
-	c = local_read((l));					\
-	for (;;) {						\
-		if (unlikely(c == (u)))				\
-			break;					\
-		old = local_cmpxchg((l), c, c + (a));		\
-		if (likely(old == c))				\
-			break;					\
-		c = old;					\
-	}							\
-	c != (u);						\
-})
+static __always_inline bool
+local_add_unless(local_t *l, long a, long u)
+{
+	long c = local_read(l);
+
+	do {
+		if (unlikely(c == u))
+			return false;
+	} while (!local_try_cmpxchg(l, &c, c + a));
+
+	return true;
+}
+
 #define local_inc_not_zero(l) local_add_unless((l), 1, 0)
 
 /* On x86_32, these are no better than the atomic variants.
-- 
2.41.0
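For reference, the do/while pattern above relies on local_try_cmpxchg()
updating the caller's expected value on failure. A sketch of the equivalent
semantics expressed with the old local_cmpxchg() (an illustration of the
contract, not the per-architecture implementation):

	static inline bool local_try_cmpxchg_sketch(local_t *l, long *old, long new)
	{
		long cur = local_cmpxchg(l, *old, new);

		if (likely(cur == *old))
			return true;
		*old = cur;	/* hand the observed value back to the caller */
		return false;
	}

With these semantics the loop in local_add_unless() needs no explicit re-read:
each failed attempt leaves 'c' holding the freshly observed counter value.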



* Re: [PATCH] locking/arch: Rewrite local_add_unless as static inline function
  2023-07-31  8:42 [PATCH] locking/arch: Rewrite local_add_unless as static inline function Uros Bizjak
@ 2023-08-07 16:13 ` Christophe Leroy
  2023-08-07 16:36   ` Uros Bizjak
  2023-10-04 15:46 ` [tip: locking/core] locking/local, arch: Rewrite local_add_unless() as a " tip-bot2 for Uros Bizjak
  1 sibling, 1 reply; 4+ messages in thread
From: Christophe Leroy @ 2023-08-07 16:13 UTC (permalink / raw)
  To: Uros Bizjak, loongarch, linux-mips, x86, linux-arch, linux-kernel
  Cc: Will Deacon, Peter Zijlstra, Boqun Feng, Mark Rutland,
	Richard Henderson, Ivan Kokshaysky, Matt Turner, Huacai Chen,
	WANG Xuerui, Jiaxun Yang, Jun Yi, Thomas Bogendoerfer,
	Michael Ellerman, Nicholas Piggin, Thomas Gleixner, Ingo Molnar,
	Borislav Petkov, Dave Hansen, H. Peter Anvin



On 31/07/2023 at 10:42, Uros Bizjak wrote:
> Rewrite local_add_unless as a static inline function with boolean
> return value, similar to arch_atomic_add_unless arch fallbacks.
> 
> The function is currently unused.

Is it worth keeping the function if it is not used?

Christophe



* Re: [PATCH] locking/arch: Rewrite local_add_unless as static inline function
  2023-08-07 16:13 ` Christophe Leroy
@ 2023-08-07 16:36   ` Uros Bizjak
  0 siblings, 0 replies; 4+ messages in thread
From: Uros Bizjak @ 2023-08-07 16:36 UTC (permalink / raw)
  To: Christophe Leroy
  Cc: loongarch, linux-mips, x86, linux-arch, linux-kernel,
	Will Deacon, Peter Zijlstra, Boqun Feng, Mark Rutland,
	Richard Henderson, Ivan Kokshaysky, Matt Turner, Huacai Chen,
	WANG Xuerui, Jiaxun Yang, Jun Yi, Thomas Bogendoerfer,
	Michael Ellerman, Nicholas Piggin, Thomas Gleixner, Ingo Molnar,
	Borislav Petkov, Dave Hansen, H. Peter Anvin

On Mon, Aug 7, 2023 at 6:13 PM Christophe Leroy
<christophe.leroy@csgroup.eu> wrote:
>
>
>
> On 31/07/2023 at 10:42, Uros Bizjak wrote:
> > Rewrite local_add_unless as a static inline function with boolean
> > return value, similar to arch_atomic_add_unless arch fallbacks.
> >
> > The function is currently unused.
>
> Is it worth keeping the function if it is not used?

We already have plenty of these kinds of unused locking functions in
arch/*/include/asm/atomic.h, so I thought we could leave this one as
well.

Uros.


* [tip: locking/core] locking/local, arch: Rewrite local_add_unless() as a static inline function
  2023-07-31  8:42 [PATCH] locking/arch: Rewrite local_add_unless as static inline function Uros Bizjak
  2023-08-07 16:13 ` Christophe Leroy
@ 2023-10-04 15:46 ` tip-bot2 for Uros Bizjak
  1 sibling, 0 replies; 4+ messages in thread
From: tip-bot2 for Uros Bizjak @ 2023-10-04 15:46 UTC (permalink / raw)
  To: linux-tip-commits; +Cc: Uros Bizjak, Ingo Molnar, x86, linux-kernel

The following commit has been merged into the locking/core branch of tip:

Commit-ID:     5e0eb67974e88dbaded765278a3ffe7af33e3b22
Gitweb:        https://git.kernel.org/tip/5e0eb67974e88dbaded765278a3ffe7af33e3b22
Author:        Uros Bizjak <ubizjak@gmail.com>
AuthorDate:    Mon, 31 Jul 2023 10:42:23 +02:00
Committer:     Ingo Molnar <mingo@kernel.org>
CommitterDate: Wed, 04 Oct 2023 11:38:11 +02:00

locking/local, arch: Rewrite local_add_unless() as a static inline function

Rewrite local_add_unless() as a static inline function with boolean
return value, similar to the arch_atomic_add_unless() arch fallbacks.

The function is currently unused.

Signed-off-by: Uros Bizjak <ubizjak@gmail.com>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Link: https://lore.kernel.org/r/20230731084458.28096-1-ubizjak@gmail.com
---
 arch/alpha/include/asm/local.h     | 33 ++++++++++++++---------------
 arch/loongarch/include/asm/local.h | 27 ++++++++++++++----------
 arch/mips/include/asm/local.h      | 27 ++++++++++++++----------
 arch/powerpc/include/asm/local.h   | 12 +++++------
 arch/x86/include/asm/local.h       | 33 ++++++++++++++---------------
 5 files changed, 70 insertions(+), 62 deletions(-)

diff --git a/arch/alpha/include/asm/local.h b/arch/alpha/include/asm/local.h
index 0fcaad6..88eb398 100644
--- a/arch/alpha/include/asm/local.h
+++ b/arch/alpha/include/asm/local.h
@@ -65,28 +65,27 @@ static __inline__ bool local_try_cmpxchg(local_t *l, long *old, long new)
 #define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))
 
 /**
- * local_add_unless - add unless the number is a given value
+ * local_add_unless - add unless the number is already a given value
  * @l: pointer of type local_t
  * @a: the amount to add to l...
  * @u: ...unless l is equal to u.
  *
- * Atomically adds @a to @l, so long as it was not @u.
- * Returns non-zero if @l was not @u, and zero otherwise.
+ * Atomically adds @a to @l, if @v was not already @u.
+ * Returns true if the addition was done.
  */
-#define local_add_unless(l, a, u)				\
-({								\
-	long c, old;						\
-	c = local_read(l);					\
-	for (;;) {						\
-		if (unlikely(c == (u)))				\
-			break;					\
-		old = local_cmpxchg((l), c, c + (a));	\
-		if (likely(old == c))				\
-			break;					\
-		c = old;					\
-	}							\
-	c != (u);						\
-})
+static __inline__ bool
+local_add_unless(local_t *l, long a, long u)
+{
+	long c = local_read(l);
+
+	do {
+		if (unlikely(c == u))
+			return false;
+	} while (!local_try_cmpxchg(l, &c, c + a));
+
+	return true;
+}
+
 #define local_inc_not_zero(l) local_add_unless((l), 1, 0)
 
 #define local_add_negative(a, l) (local_add_return((a), (l)) < 0)
diff --git a/arch/loongarch/include/asm/local.h b/arch/loongarch/include/asm/local.h
index c496758..f53ea65 100644
--- a/arch/loongarch/include/asm/local.h
+++ b/arch/loongarch/include/asm/local.h
@@ -70,22 +70,27 @@ static inline bool local_try_cmpxchg(local_t *l, long *old, long new)
 #define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
 
 /**
- * local_add_unless - add unless the number is a given value
+ * local_add_unless - add unless the number is already a given value
  * @l: pointer of type local_t
  * @a: the amount to add to l...
  * @u: ...unless l is equal to u.
  *
- * Atomically adds @a to @l, so long as it was not @u.
- * Returns non-zero if @l was not @u, and zero otherwise.
+ * Atomically adds @a to @l, if @v was not already @u.
+ * Returns true if the addition was done.
  */
-#define local_add_unless(l, a, u)				\
-({								\
-	long c, old;						\
-	c = local_read(l);					\
-	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
+static inline bool
+local_add_unless(local_t *l, long a, long u)
+{
+	long c = local_read(l);
+
+	do {
+		if (unlikely(c == u))
+			return false;
+	} while (!local_try_cmpxchg(l, &c, c + a));
+
+	return true;
+}
+
 #define local_inc_not_zero(l) local_add_unless((l), 1, 0)
 
 #define local_dec_return(l) local_sub_return(1, (l))
diff --git a/arch/mips/include/asm/local.h b/arch/mips/include/asm/local.h
index e6ae3df..86fc240 100644
--- a/arch/mips/include/asm/local.h
+++ b/arch/mips/include/asm/local.h
@@ -108,22 +108,27 @@ static __inline__ bool local_try_cmpxchg(local_t *l, long *old, long new)
 #define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
 
 /**
- * local_add_unless - add unless the number is a given value
+ * local_add_unless - add unless the number is already a given value
  * @l: pointer of type local_t
  * @a: the amount to add to l...
  * @u: ...unless l is equal to u.
  *
- * Atomically adds @a to @l, so long as it was not @u.
- * Returns non-zero if @l was not @u, and zero otherwise.
+ * Atomically adds @a to @l, if @v was not already @u.
+ * Returns true if the addition was done.
  */
-#define local_add_unless(l, a, u)				\
-({								\
-	long c, old;						\
-	c = local_read(l);					\
-	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
+static __inline__ bool
+local_add_unless(local_t *l, long a, long u)
+{
+	long c = local_read(l);
+
+	do {
+		if (unlikely(c == u))
+			return false;
+	} while (!local_try_cmpxchg(l, &c, c + a));
+
+	return true;
+}
+
 #define local_inc_not_zero(l) local_add_unless((l), 1, 0)
 
 #define local_dec_return(l) local_sub_return(1, (l))
diff --git a/arch/powerpc/include/asm/local.h b/arch/powerpc/include/asm/local.h
index 45492fb..ec6ced6 100644
--- a/arch/powerpc/include/asm/local.h
+++ b/arch/powerpc/include/asm/local.h
@@ -115,23 +115,23 @@ static __inline__ long local_xchg(local_t *l, long n)
 }
 
 /**
- * local_add_unless - add unless the number is a given value
+ * local_add_unless - add unless the number is already a given value
  * @l: pointer of type local_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
- * Atomically adds @a to @l, so long as it was not @u.
- * Returns non-zero if @l was not @u, and zero otherwise.
+ * Atomically adds @a to @l, if @v was not already @u.
+ * Returns true if the addition was done.
  */
-static __inline__ int local_add_unless(local_t *l, long a, long u)
+static __inline__ bool local_add_unless(local_t *l, long a, long u)
 {
 	unsigned long flags;
-	int ret = 0;
+	bool ret = false;
 
 	powerpc_local_irq_pmu_save(flags);
 	if (l->v != u) {
 		l->v += a;
-		ret = 1;
+		ret = true;
 	}
 	powerpc_local_irq_pmu_restore(flags);
 
diff --git a/arch/x86/include/asm/local.h b/arch/x86/include/asm/local.h
index 635132a..73dba8b 100644
--- a/arch/x86/include/asm/local.h
+++ b/arch/x86/include/asm/local.h
@@ -135,28 +135,27 @@ static inline bool local_try_cmpxchg(local_t *l, long *old, long new)
 #define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))
 
 /**
- * local_add_unless - add unless the number is a given value
+ * local_add_unless - add unless the number is already a given value
  * @l: pointer of type local_t
  * @a: the amount to add to l...
  * @u: ...unless l is equal to u.
  *
- * Atomically adds @a to @l, so long as it was not @u.
- * Returns non-zero if @l was not @u, and zero otherwise.
+ * Atomically adds @a to @l, if @v was not already @u.
+ * Returns true if the addition was done.
  */
-#define local_add_unless(l, a, u)				\
-({								\
-	long c, old;						\
-	c = local_read((l));					\
-	for (;;) {						\
-		if (unlikely(c == (u)))				\
-			break;					\
-		old = local_cmpxchg((l), c, c + (a));		\
-		if (likely(old == c))				\
-			break;					\
-		c = old;					\
-	}							\
-	c != (u);						\
-})
+static __always_inline bool
+local_add_unless(local_t *l, long a, long u)
+{
+	long c = local_read(l);
+
+	do {
+		if (unlikely(c == u))
+			return false;
+	} while (!local_try_cmpxchg(l, &c, c + a));
+
+	return true;
+}
+
 #define local_inc_not_zero(l) local_add_unless((l), 1, 0)
 
 /* On x86_32, these are no better than the atomic variants.


