* [dpdk-dev] [PATCH v1 1/3] eal/arm64: add 128-bit atomic compare exchange
@ 2019-06-23  2:41 Phil Yang
  2019-06-23  2:41 ` [dpdk-dev] [PATCH v1 2/3] test/atomic: add 128b compare and swap test Phil Yang
                   ` (7 more replies)
  0 siblings, 8 replies; 95+ messages in thread
From: Phil Yang @ 2019-06-23  2:41 UTC (permalink / raw)
  To: dev
  Cc: thomas, jerinj, hemant.agrawal, Honnappa.Nagarahalli, gavin.hu,
	nd, gage.eads

Add 128-bit atomic compare exchange on aarch64.

Signed-off-by: Phil Yang <phil.yang@arm.com>
Tested-by: Honnappa Nagarahalli <honnappa.nagarahalli@arm.com>
Reviewed-by: Honnappa Nagarahalli <honnappa.nagarahalli@arm.com>
---
This patch depends on
http://patchwork.dpdk.org/patch/54840/

v2:
eal/stack: fix 'pointer-sign' warning
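
Not part of the patch: a minimal usage sketch of the new API, assuming
<rte_atomic.h> is included; the counter example and function name below
are illustrative only.

	/* Atomically add to both 64-bit halves of a 128-bit counter. */
	static void
	counter128_add(rte_int128_t *counter, uint64_t lo, uint64_t hi)
	{
		/* A non-atomic snapshot is fine; the CAS validates it. */
		rte_int128_t expected = *counter;
		rte_int128_t desired;

		do {
			desired.val[0] = expected.val[0] + lo;
			desired.val[1] = expected.val[1] + hi;
			/* On failure, 'expected' is refreshed with the
			 * current contents of *counter, so the next
			 * iteration retries with up-to-date values.
			 */
		} while (!rte_atomic128_cmp_exchange(counter, &expected,
				&desired, 0, __ATOMIC_ACQ_REL,
				__ATOMIC_RELAXED));
	}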

 .../common/include/arch/arm/rte_atomic_64.h        | 184 +++++++++++++++++++++
 .../common/include/arch/x86/rte_atomic_64.h        |  12 --
 lib/librte_eal/common/include/generic/rte_atomic.h |  15 +-
 3 files changed, 198 insertions(+), 13 deletions(-)


diff --git a/lib/librte_eal/common/include/arch/arm/rte_atomic_64.h b/lib/librte_eal/common/include/arch/arm/rte_atomic_64.h
index 97060e4..4ef900c 100644
--- a/lib/librte_eal/common/include/arch/arm/rte_atomic_64.h
+++ b/lib/librte_eal/common/include/arch/arm/rte_atomic_64.h
@@ -1,5 +1,6 @@
 /* SPDX-License-Identifier: BSD-3-Clause
  * Copyright(c) 2015 Cavium, Inc
+ * Copyright(c) 2019 Arm Limited
  */
 
 #ifndef _RTE_ATOMIC_ARM64_H_
@@ -14,6 +15,9 @@ extern "C" {
 #endif
 
 #include "generic/rte_atomic.h"
+#include <rte_branch_prediction.h>
+#include <rte_compat.h>
+#include <rte_debug.h>
 
 #define dsb(opt) asm volatile("dsb " #opt : : : "memory")
 #define dmb(opt) asm volatile("dmb " #opt : : : "memory")
@@ -40,6 +44,186 @@ extern "C" {
 
 #define rte_cio_rmb() dmb(oshld)
 
+/*----------------------- 128 bit atomic operations -------------------------*/
+
+#define RTE_HAS_ACQ(mo) ((mo) != __ATOMIC_RELAXED && (mo) != __ATOMIC_RELEASE)
+#define RTE_HAS_RLS(mo) ((mo) == __ATOMIC_RELEASE || \
+			 (mo) == __ATOMIC_ACQ_REL || \
+			 (mo) == __ATOMIC_SEQ_CST)
+
+#define RTE_MO_LOAD(mo)  (RTE_HAS_ACQ((mo)) \
+		? __ATOMIC_ACQUIRE : __ATOMIC_RELAXED)
+#define RTE_MO_STORE(mo) (RTE_HAS_RLS((mo)) \
+		? __ATOMIC_RELEASE : __ATOMIC_RELAXED)
+
+#ifdef __ARM_FEATURE_ATOMICS
+static inline rte_int128_t
+__rte_casp(rte_int128_t *dst, rte_int128_t old, rte_int128_t updated, int mo)
+{
+
+	/* caspX instructions register pair must start from even-numbered
+	 * register at operand 1.
+	 * So, specify registers for local variables here.
+	 */
+	register uint64_t x0 __asm ("x0") = (uint64_t)old.val[0];
+	register uint64_t x1 __asm ("x1") = (uint64_t)old.val[1];
+	register uint64_t x2 __asm ("x2") = (uint64_t)updated.val[0];
+	register uint64_t x3 __asm ("x3") = (uint64_t)updated.val[1];
+
+	if (mo ==  __ATOMIC_RELAXED) {
+		asm volatile(
+				"casp %[old0], %[old1], %[upd0], %[upd1], [%[dst]]"
+				: [old0] "+r" (x0),
+				  [old1] "+r" (x1)
+				: [upd0] "r" (x2),
+				  [upd1] "r" (x3),
+				  [dst] "r" (dst)
+				: "memory");
+	} else if (mo == __ATOMIC_ACQUIRE) {
+		asm volatile(
+				"caspa %[old0], %[old1], %[upd0], %[upd1], [%[dst]]"
+				: [old0] "+r" (x0),
+				  [old1] "+r" (x1)
+				: [upd0] "r" (x2),
+				  [upd1] "r" (x3),
+				  [dst] "r" (dst)
+				: "memory");
+	} else if (mo == __ATOMIC_ACQ_REL) {
+		asm volatile(
+				"caspal %[old0], %[old1], %[upd0], %[upd1], [%[dst]]"
+				: [old0] "+r" (x0),
+				  [old1] "+r" (x1)
+				: [upd0] "r" (x2),
+				  [upd1] "r" (x3),
+				  [dst] "r" (dst)
+				: "memory");
+	} else if (mo == __ATOMIC_RELEASE) {
+		asm volatile(
+				"caspl %[old0], %[old1], %[upd0], %[upd1], [%[dst]]"
+				: [old0] "+r" (x0),
+				  [old1] "+r" (x1)
+				: [upd0] "r" (x2),
+				  [upd1] "r" (x3),
+				  [dst] "r" (dst)
+				: "memory");
+	} else {
+		rte_panic("Invalid memory order\n");
+	}
+
+	old.val[0] = x0;
+	old.val[1] = x1;
+
+	return old;
+}
+#else
+static inline rte_int128_t
+__rte_ldx128(const rte_int128_t *src, int mo)
+{
+	rte_int128_t ret;
+	if (mo == __ATOMIC_ACQUIRE)
+		asm volatile(
+				"ldaxp %0, %1, %2"
+				: "=&r" (ret.val[0]),
+				  "=&r" (ret.val[1])
+				: "Q" (src->val[0])
+				: "memory");
+	else if (mo == __ATOMIC_RELAXED)
+		asm volatile(
+				"ldxp %0, %1, %2"
+				: "=&r" (ret.val[0]),
+				  "=&r" (ret.val[1])
+				: "Q" (src->val[0])
+				: "memory");
+	else
+		rte_panic("Invalid memory order\n");
+
+	return ret;
+}
+
+static inline uint32_t
+__rte_stx128(rte_int128_t *dst, const rte_int128_t src, int mo)
+{
+	uint32_t ret;
+	if (mo == __ATOMIC_RELEASE)
+		asm volatile(
+				"stlxp %w0, %1, %2, %3"
+				: "=&r" (ret)
+				: "r" (src.val[0]),
+				  "r" (src.val[1]),
+				  "Q" (dst->val[0])
+				: "memory");
+	else if (mo == __ATOMIC_RELAXED)
+		asm volatile(
+				"stxp %w0, %1, %2, %3"
+				: "=&r" (ret)
+				: "r" (src.val[0]),
+				  "r" (src.val[1]),
+				  "Q" (dst->val[0])
+				: "memory");
+	else
+		rte_panic("Invalid memory order\n");
+
+	/* Return 0 on success, 1 on failure */
+	return ret;
+}
+#endif
+
+static inline int __rte_experimental
+rte_atomic128_cmp_exchange(rte_int128_t *dst,
+				rte_int128_t *exp,
+				const rte_int128_t *src,
+				unsigned int weak,
+				int success,
+				int failure)
+{
+	// Always do strong CAS
+	RTE_SET_USED(weak);
+	/* Ignore memory ordering for failure, memory order for
+	 * success must be stronger or equal
+	 */
+	RTE_SET_USED(failure);
+
+#ifdef __ARM_FEATURE_ATOMICS
+	rte_int128_t expected = *exp;
+	rte_int128_t desired = *src;
+	rte_int128_t old;
+
+	old = __rte_casp(dst, expected, desired, success);
+#else
+	int ldx_mo = RTE_MO_LOAD(success);
+	int stx_mo = RTE_MO_STORE(success);
+	uint32_t ret = 1;
+	register rte_int128_t expected = *exp;
+	register rte_int128_t desired = *src;
+	register rte_int128_t old;
+
+	/* ldx128 alone cannot guarantee atomicity; src or old must be
+	 * written back with stx128 to confirm that the read was atomic.
+	 */
+	do {
+		old = __rte_ldx128(dst, ldx_mo);
+		if (likely(old.int128 == expected.int128))
+			ret = __rte_stx128(dst, desired, stx_mo);
+		else
+			/* In the failure case (since 'weak' is ignored and only
+			 * weak == 0 is implemented), expected should contain the
+			 * atomically read value of dst. This means, 'old' needs
+			 * to be stored back to ensure it was read atomically.
+			 */
+			ret = __rte_stx128(dst, old, stx_mo);
+	} while (unlikely(ret));
+#endif
+
+	/* Unconditionally updating expected removes
+	 * an 'if' statement.
+	 * expected should already be in register if
+	 * not in the cache.
+	 */
+	*exp = old;
+
+	return (old.int128 == expected.int128);
+}
+
 #ifdef __cplusplus
 }
 #endif
diff --git a/lib/librte_eal/common/include/arch/x86/rte_atomic_64.h b/lib/librte_eal/common/include/arch/x86/rte_atomic_64.h
index 6232c57..23cf48f 100644
--- a/lib/librte_eal/common/include/arch/x86/rte_atomic_64.h
+++ b/lib/librte_eal/common/include/arch/x86/rte_atomic_64.h
@@ -212,18 +212,6 @@ static inline void rte_atomic64_clear(rte_atomic64_t *v)
 
 /*------------------------ 128 bit atomic operations -------------------------*/
 
-/**
- * 128-bit integer structure.
- */
-RTE_STD_C11
-typedef struct {
-	RTE_STD_C11
-	union {
-		uint64_t val[2];
-		__extension__ __int128 int128;
-	};
-} __rte_aligned(16) rte_int128_t;
-
 static inline int __rte_experimental
 rte_atomic128_cmp_exchange(rte_int128_t *dst,
 			   rte_int128_t *exp,
diff --git a/lib/librte_eal/common/include/generic/rte_atomic.h b/lib/librte_eal/common/include/generic/rte_atomic.h
index 9958543..7dd1aa4 100644
--- a/lib/librte_eal/common/include/generic/rte_atomic.h
+++ b/lib/librte_eal/common/include/generic/rte_atomic.h
@@ -1081,6 +1081,18 @@ static inline void rte_atomic64_clear(rte_atomic64_t *v)
 
 /*------------------------ 128 bit atomic operations -------------------------*/
 
+/**
+ * 128-bit integer structure.
+ */
+RTE_STD_C11
+typedef struct {
+	RTE_STD_C11
+	union {
+		uint64_t val[2];
+		__extension__ __int128 int128;
+	};
+} __rte_aligned(16) rte_int128_t;
+
 #ifdef __DOXYGEN__
 
 /**
@@ -1093,7 +1105,8 @@ static inline void rte_atomic64_clear(rte_atomic64_t *v)
  *     *exp = *dst
  * @endcode
  *
- * @note This function is currently only available for the x86-64 platform.
+ * @note This function is currently available for the x86-64 and aarch64
+ * platforms.
  *
  * @note The success and failure arguments must be one of the __ATOMIC_* values
  * defined in the C++11 standard. For details on their behavior, refer to the
-- 
2.7.4
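
A note on how the implementation is selected: the CASP-based path is
compiled only when the compiler defines the ACLE macro
__ARM_FEATURE_ATOMICS, i.e. when LSE atomics are available (ARMv8.1-A or
later, or an explicit '+lse' in -march); otherwise the LDXP/STXP retry
loop is used. A minimal, illustrative probe (not part of the patch) to
see which path a given build takes:

	/* Illustrative compile-time probe only. */
	#ifdef __ARM_FEATURE_ATOMICS
	#pragma message "128-bit CAS: CASP (LSE) path selected"
	#else
	#pragma message "128-bit CAS: LDXP/STXP retry-loop path selected"
	#endif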


* Re: [dpdk-dev] [PATCH v2 1/3] eal/arm64: add 128-bit atomic compare exchange
@ 2019-06-24  6:41 Jerin Jacob Kollanukkaran
  2019-06-24 15:43 ` Phil Yang (Arm Technology China)
  0 siblings, 1 reply; 95+ messages in thread
From: Jerin Jacob Kollanukkaran @ 2019-06-24  6:41 UTC (permalink / raw)
  To: Phil Yang, dev
  Cc: thomas, hemant.agrawal, Honnappa.Nagarahalli, gavin.hu, nd, gage.eads

> -----Original Message-----
> From: Phil Yang <phil.yang@arm.com>
> Sent: Sunday, June 23, 2019 8:46 AM
> To: dev@dpdk.org
> Cc: thomas@monjalon.net; Jerin Jacob Kollanukkaran <jerinj@marvell.com>;
> hemant.agrawal@nxp.com; Honnappa.Nagarahalli@arm.com;
> gavin.hu@arm.com; nd@arm.com; gage.eads@intel.com
> Subject: [EXT] [PATCH v2 1/3] eal/arm64: add 128-bit atomic compare
> exchange
> 
> Add 128-bit atomic compare exchange on aarch64.
> 
> Signed-off-by: Phil Yang <phil.yang@arm.com>
> Reviewed-by: Honnappa Nagarahalli <honnappa.nagarahalli@arm.com>
> Tested-by: Honnappa Nagarahalli <honnappa.nagarahalli@arm.com>
> ---
> This patch depends on 'eal/stack: fix 'pointer-sign' warning'
> http://patchwork.dpdk.org/patch/54840/
> 
> +
> +#ifdef __ARM_FEATURE_ATOMICS
> +static inline rte_int128_t
> +__rte_casp(rte_int128_t *dst, rte_int128_t old, rte_int128_t updated,
> +int mo) {

Better to change to "const int mo".
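
For illustration, the suggested prototype would simply add the qualifier
(sketch only):

	static inline rte_int128_t
	__rte_casp(rte_int128_t *dst, rte_int128_t old, rte_int128_t updated,
		   const int mo);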

> +
> +	/* caspX instructions register pair must start from even-numbered
> +	 * register at operand 1.
> +	 * So, specify registers for local variables here.
> +	 */
> +	register uint64_t x0 __asm("x0") = (uint64_t)old.val[0];
> +	register uint64_t x1 __asm("x1") = (uint64_t)old.val[1];
> +	register uint64_t x2 __asm("x2") = (uint64_t)updated.val[0];
> +	register uint64_t x3 __asm("x3") = (uint64_t)updated.val[1];
> +
> +	if (mo ==  __ATOMIC_RELAXED) {
> +		asm volatile(
> +				"casp %[old0], %[old1], %[upd0], %[upd1],
> [%[dst]]"
> +				: [old0] "+r" (x0),
> +				  [old1] "+r" (x1)
> +				: [upd0] "r" (x2),
> +				  [upd1] "r" (x3),
> +				  [dst] "r" (dst)
> +				: "memory");
> +	} else if (mo == __ATOMIC_ACQUIRE) {
> +		asm volatile(
> +				"caspa %[old0], %[old1], %[upd0], %[upd1],
> [%[dst]]"
> +				: [old0] "+r" (x0),
> +				  [old1] "+r" (x1)
> +				: [upd0] "r" (x2),
> +				  [upd1] "r" (x3),
> +				  [dst] "r" (dst)
> +				: "memory");
> +	} else if (mo == __ATOMIC_ACQ_REL) {
> +		asm volatile(
> +				"caspal %[old0], %[old1], %[upd0], %[upd1],
> [%[dst]]"
> +				: [old0] "+r" (x0),
> +				  [old1] "+r" (x1)
> +				: [upd0] "r" (x2),
> +				  [upd1] "r" (x3),
> +				  [dst] "r" (dst)
> +				: "memory");
> +	} else if (mo == __ATOMIC_RELEASE) {
> +		asm volatile(
> +				"caspl %[old0], %[old1], %[upd0], %[upd1],
> [%[dst]]"
> +				: [old0] "+r" (x0),
> +				  [old1] "+r" (x1)
> +				: [upd0] "r" (x2),
> +				  [upd1] "r" (x3),
> +				  [dst] "r" (dst)
> +				: "memory");

I think this code duplication can be avoided with a macro, taking casp/caspa/caspal/caspl as an argument.
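
One possible shape of such a macro, as a sketch of this suggestion only;
the macro and helper names are illustrative, not necessarily what gets
merged:

	#define __RTE_CAS_OP(name, op)                                       \
	static inline rte_int128_t                                           \
	name(rte_int128_t *dst, rte_int128_t old, rte_int128_t updated)      \
	{                                                                    \
		/* caspX needs even/odd register pairs for each operand */  \
		register uint64_t x0 __asm("x0") = (uint64_t)old.val[0];    \
		register uint64_t x1 __asm("x1") = (uint64_t)old.val[1];    \
		register uint64_t x2 __asm("x2") = (uint64_t)updated.val[0];\
		register uint64_t x3 __asm("x3") = (uint64_t)updated.val[1];\
		asm volatile(                                                \
			op " %[old0], %[old1], %[upd0], %[upd1], [%[dst]]"  \
			: [old0] "+r" (x0), [old1] "+r" (x1)                 \
			: [upd0] "r" (x2), [upd1] "r" (x3), [dst] "r" (dst)  \
			: "memory");                                         \
		old.val[0] = x0;                                             \
		old.val[1] = x1;                                             \
		return old;                                                  \
	}

	__RTE_CAS_OP(__rte_cas_relaxed, "casp")
	__RTE_CAS_OP(__rte_cas_acquire, "caspa")
	__RTE_CAS_OP(__rte_cas_release, "caspl")
	__RTE_CAS_OP(__rte_cas_acq_rel, "caspal")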

> +	} else {
> +		rte_panic("Invalid memory order\n");


rte_panic should be removed from library code. In this case, I think an invalid mo can fall back to the strongest barrier.
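
A sketch of that fallback, reusing the illustrative helper names from the
macro sketch above: any memory order that is not matched explicitly maps
to caspal, the strongest (acquire + release) variant, so no panic path is
needed.

	if (mo == __ATOMIC_RELAXED)
		old = __rte_cas_relaxed(dst, old, updated);
	else if (mo == __ATOMIC_ACQUIRE)
		old = __rte_cas_acquire(dst, old, updated);
	else if (mo == __ATOMIC_RELEASE)
		old = __rte_cas_release(dst, old, updated);
	else
		old = __rte_cas_acq_rel(dst, old, updated);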

> +	}
> +
> +	old.val[0] = x0;
> +	old.val[1] = x1;
> +
> +	return old;
> +}
> +#else
> +static inline rte_int128_t
> +__rte_ldx128(const rte_int128_t *src, int mo) {
> +	rte_int128_t ret;
> +	if (mo == __ATOMIC_ACQUIRE)
> +		asm volatile(
> +				"ldaxp %0, %1, %2"
> +				: "=&r" (ret.val[0]),
> +				  "=&r" (ret.val[1])
> +				: "Q" (src->val[0])
> +				: "memory");
> +	else if (mo == __ATOMIC_RELAXED)
> +		asm volatile(
> +				"ldxp %0, %1, %2"
> +				: "=&r" (ret.val[0]),
> +				  "=&r" (ret.val[1])
> +				: "Q" (src->val[0])
> +				: "memory");

Same as above comment.

> +	else
> +		rte_panic("Invalid memory order\n");

Same as above comment.

> +
> +	return ret;
> +}
> +
> +static inline uint32_t
> +__rte_stx128(rte_int128_t *dst, const rte_int128_t src, int mo) {
> +	uint32_t ret;
> +	if (mo == __ATOMIC_RELEASE)
> +		asm volatile(
> +				"stlxp %w0, %1, %2, %3"
> +				: "=&r" (ret)
> +				: "r" (src.val[0]),
> +				  "r" (src.val[1]),
> +				  "Q" (dst->val[0])
> +				: "memory");
> +	else if (mo == __ATOMIC_RELAXED)
> +		asm volatile(
> +				"stxp %w0, %1, %2, %3"
> +				: "=&r" (ret)
> +				: "r" (src.val[0]),
> +				  "r" (src.val[1]),
> +				  "Q" (dst->val[0])
> +				: "memory");
> +	else
> +		rte_panic("Invalid memory order\n");

Same as above comment.

> +
> +	/* Return 0 on success, 1 on failure */
> +	return ret;
> +}
> +#endif
> +
> +static inline int __rte_experimental
> +rte_atomic128_cmp_exchange(rte_int128_t *dst,
> +				rte_int128_t *exp,
> +				const rte_int128_t *src,
> +				unsigned int weak,
> +				int success,
> +				int failure)
> +{
> +	// Always do strong CAS

Remove C++ style code comment.
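
For illustration, the same note as a C-style comment (sketch only):

	/* Always do strong CAS */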



Thread overview: 95+ messages
2019-06-23  2:41 [dpdk-dev] [PATCH v1 1/3] eal/arm64: add 128-bit atomic compare exchange Phil Yang
2019-06-23  2:41 ` [dpdk-dev] [PATCH v1 2/3] test/atomic: add 128b compare and swap test Phil Yang
2019-06-23  2:41 ` [dpdk-dev] [PATCH v1 3/3] eal/stack: enable lock-free stack for aarch64 Phil Yang
2019-06-23  3:15 ` [dpdk-dev] [PATCH v2 1/3] eal/arm64: add 128-bit atomic compare exchange Phil Yang
2019-06-23  3:15   ` [dpdk-dev] [PATCH v2 2/3] test/atomic: add 128b compare and swap test Phil Yang
2019-06-24 15:09     ` Eads, Gage
2019-06-24 15:29       ` Phil Yang (Arm Technology China)
2019-06-23  3:15   ` [dpdk-dev] [PATCH v2 3/3] eal/stack: enable lock-free stack for aarch64 Phil Yang
2019-06-24 15:15     ` Eads, Gage
2019-06-24 15:22       ` Phil Yang (Arm Technology China)
2019-06-24 14:46   ` [dpdk-dev] [PATCH v2 1/3] eal/arm64: add 128-bit atomic compare exchange Eads, Gage
2019-06-24 15:35     ` Phil Yang (Arm Technology China)
2019-06-28  8:11 ` [dpdk-dev] [PATCH v3 " Phil Yang
2019-06-28  8:11   ` [dpdk-dev] [PATCH v3 2/3] test/atomic: add 128b compare and swap test Phil Yang
2019-06-29  0:17     ` Eads, Gage
2019-07-19  4:03     ` [dpdk-dev] [EXT] " Jerin Jacob Kollanukkaran
2019-06-28  8:11   ` [dpdk-dev] [PATCH v3 3/3] eal/stack: enable lock-free stack for aarch64 Phil Yang
2019-06-29  0:18     ` Eads, Gage
2019-07-19  4:18     ` [dpdk-dev] [EXT] " Jerin Jacob Kollanukkaran
2019-07-19  4:42       ` Eads, Gage
2019-07-19  5:02         ` Jerin Jacob Kollanukkaran
2019-07-19  5:15           ` Phil Yang (Arm Technology China)
2019-07-03 12:25   ` [dpdk-dev] [EXT] [PATCH v3 1/3] eal/arm64: add 128-bit atomic compare exchange Jerin Jacob Kollanukkaran
2019-07-03 13:07     ` Jerin Jacob Kollanukkaran
2019-07-05  4:20       ` Honnappa Nagarahalli
2019-07-05  4:37         ` Pavan Nikhilesh Bhagavatula
2019-07-09  9:27           ` Phil Yang (Arm Technology China)
2019-07-09 11:14             ` Jerin Jacob Kollanukkaran
2019-07-19  6:24   ` Jerin Jacob Kollanukkaran
2019-07-19 11:01     ` Phil Yang (Arm Technology China)
2019-07-19 12:35       ` Jerin Jacob Kollanukkaran
2019-07-19 13:56         ` Phil Yang (Arm Technology China)
2019-07-19 14:50           ` Eads, Gage
2019-07-22  8:44 ` [dpdk-dev] [PATCH v4 " Phil Yang
2019-07-22  8:44   ` [dpdk-dev] [PATCH v4 2/3] test/atomic: add 128b compare and swap test Phil Yang
2019-07-22  8:44   ` [dpdk-dev] [PATCH v4 3/3] eal/stack: enable lock-free stack for aarch64 Phil Yang
2019-07-22 10:22     ` [dpdk-dev] [EXT] " Jerin Jacob Kollanukkaran
2019-07-22 11:51       ` Phil Yang (Arm Technology China)
2019-07-22 10:20   ` [dpdk-dev] [EXT] [PATCH v4 1/3] eal/arm64: add 128-bit atomic compare exchange Jerin Jacob Kollanukkaran
2019-07-22 11:50     ` Phil Yang (Arm Technology China)
2019-07-22 13:06 ` [dpdk-dev] [PATCH v5 " Phil Yang
2019-07-22 13:06   ` [dpdk-dev] [PATCH v5 2/3] test/atomic: add 128b compare and swap test Phil Yang
2019-07-22 13:06   ` [dpdk-dev] [PATCH v5 3/3] eal/stack: enable lock-free stack for aarch64 Phil Yang
2019-07-22 14:14     ` [dpdk-dev] [EXT] " Jerin Jacob Kollanukkaran
2019-07-22 15:19       ` Phil Yang (Arm Technology China)
2019-07-22 14:34     ` [dpdk-dev] " Eads, Gage
2019-07-22 14:43       ` Phil Yang (Arm Technology China)
2019-07-22 14:19   ` [dpdk-dev] [EXT] [PATCH v5 1/3] eal/arm64: add 128-bit atomic compare exchange Jerin Jacob Kollanukkaran
2019-07-22 16:23     ` Phil Yang (Arm Technology China)
2019-07-22 16:22 ` [dpdk-dev] [PATCH v6 " Phil Yang
2019-07-22 16:22   ` [dpdk-dev] [PATCH v6 2/3] test/atomic: add 128b compare and swap test Phil Yang
2019-07-22 16:22   ` [dpdk-dev] [PATCH v6 3/3] eal/stack: enable lock-free stack for aarch64 Phil Yang
2019-07-22 16:59     ` [dpdk-dev] [EXT] " Jerin Jacob Kollanukkaran
2019-07-22 16:57   ` [dpdk-dev] [EXT] [PATCH v6 1/3] eal/arm64: add 128-bit atomic compare exchange Jerin Jacob Kollanukkaran
2019-07-23  3:28     ` Phil Yang (Arm Technology China)
2019-07-23  7:09       ` Jerin Jacob Kollanukkaran
2019-07-23  7:53         ` Phil Yang (Arm Technology China)
2019-07-23  5:57 ` [dpdk-dev] [PATCH v7 " Phil Yang
2019-07-23  5:57   ` [dpdk-dev] [PATCH v7 2/3] test/atomic: add 128b compare and swap test Phil Yang
2019-07-23  5:57   ` [dpdk-dev] [PATCH v7 3/3] eal/stack: enable lock-free stack for aarch64 Phil Yang
2019-07-23  7:05   ` [dpdk-dev] [PATCH v8 1/3] eal/arm64: add 128-bit atomic compare exchange jerinj
2019-07-23  7:05     ` [dpdk-dev] [PATCH v8 2/3] test/atomic: add 128b compare and swap test jerinj
2019-07-23  7:05     ` [dpdk-dev] [PATCH v8 3/3] eal/stack: enable lock-free stack for aarch64 jerinj
2019-08-14  8:27     ` [dpdk-dev] [PATCH v9 1/3] eal/arm64: add 128-bit atomic compare exchange Phil Yang
2019-08-14  8:27       ` [dpdk-dev] [PATCH v9 2/3] test/atomic: add 128b compare and swap test Phil Yang
2019-10-14 15:45         ` David Marchand
2019-10-15 11:32           ` Phil Yang (Arm Technology China)
2019-08-14  8:27       ` [dpdk-dev] [PATCH v9 3/3] eal/stack: enable lock-free stack for aarch64 Phil Yang
2019-10-14 15:45         ` David Marchand
2019-10-15 11:32           ` Phil Yang (Arm Technology China)
2019-10-14 15:43       ` [dpdk-dev] [PATCH v9 1/3] eal/arm64: add 128-bit atomic compare exchange David Marchand
2019-10-15 11:32         ` Phil Yang (Arm Technology China)
2019-10-15 12:16           ` David Marchand
2019-10-16  9:04             ` Phil Yang (Arm Technology China)
2019-10-17 12:45               ` David Marchand
2019-10-15 11:38       ` [dpdk-dev] [PATCH v10 " Phil Yang
2019-10-15 11:38         ` [dpdk-dev] [PATCH v10 2/3] test/atomic: add 128b compare and swap test Phil Yang
2019-10-15 11:38         ` [dpdk-dev] [PATCH v10 3/3] eal/stack: enable lock-free stack for aarch64 Phil Yang
2019-10-18 11:21         ` [dpdk-dev] [PATCH v11 1/3] eal/arm64: add 128-bit atomic compare exchange Phil Yang
2019-10-18 11:21           ` [dpdk-dev] [PATCH v11 2/3] test/atomic: add 128b compare and swap test Phil Yang
2019-10-21  8:25             ` David Marchand
2019-10-18 11:21           ` [dpdk-dev] [PATCH v11 3/3] eal/stack: enable lock-free stack for aarch64 Phil Yang
2019-10-21  8:26             ` David Marchand
2019-10-18 14:16           ` [dpdk-dev] [PATCH v11 1/3] eal/arm64: add 128-bit atomic compare exchange David Marchand
2019-10-18 14:24             ` Jerin Jacob
2019-10-18 14:33               ` David Marchand
2019-10-18 14:36                 ` Jerin Jacob
2019-10-21  8:24                   ` David Marchand
2019-06-24  6:41 [dpdk-dev] [PATCH v2 " Jerin Jacob Kollanukkaran
2019-06-24 15:43 ` Phil Yang (Arm Technology China)
2019-06-24 16:12   ` Honnappa Nagarahalli
2019-06-24 16:25     ` Thomas Monjalon
2019-06-24 17:41       ` Honnappa Nagarahalli
2019-06-25  6:15         ` Jerin Jacob Kollanukkaran
2019-06-26 10:10           ` Phil Yang (Arm Technology China)
