<snip>
+#define __RTE_GEN_BIT_ATOMIC_TEST(v, qualifier, size) \
__rte_experimental \
static inline bool \
- __rte_bit_atomic_test ## size(const uint ## size ## _t *addr, \
- unsigned int nr, int memory_order) \
+ __rte_bit_atomic_ ## v ## test ## size(const qualifier uint ## size ## _t *addr, \
+                                        unsigned int nr, int memory_order) \
{ \
RTE_ASSERT(nr < size); \
\
- const RTE_ATOMIC(uint ## size ## _t) *a_addr = \
- (const RTE_ATOMIC(uint ## size ## _t) *)addr; \
+ const qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
+ (const qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
\
uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
return rte_atomic_load_explicit(a_addr, memory_order) & mask; \
}
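
[Editor's note: for orientation, instantiating the reworked macro as
__RTE_GEN_BIT_ATOMIC_TEST(v_, volatile, 32) should expand to roughly the
function below. This is an illustrative sketch of the token pasting only,
not part of the patch: the "v_" argument lands in the function name and
"volatile" becomes the pointer qualifier.

__rte_experimental
static inline bool
__rte_bit_atomic_v_test32(const volatile uint32_t *addr,
			  unsigned int nr, int memory_order)
{
	RTE_ASSERT(nr < 32);

	/* The cast relies on RTE_ATOMIC(uint32_t) being
	 * layout-compatible with a plain uint32_t. */
	const volatile RTE_ATOMIC(uint32_t) *a_addr =
		(const volatile RTE_ATOMIC(uint32_t) *)addr;

	uint32_t mask = (uint32_t)1 << nr;
	return rte_atomic_load_explicit(a_addr, memory_order) & mask;
}
]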
-#define __RTE_GEN_BIT_ATOMIC_SET(size) \
+#define __RTE_GEN_BIT_ATOMIC_SET(v, qualifier, size) \
__rte_experimental \
static inline void \
- __rte_bit_atomic_set ## size(uint ## size ## _t *addr, \
- unsigned int nr, int memory_order) \
+ __rte_bit_atomic_ ## v ## set ## size(qualifier uint ## size ## _t *addr, \
+                                       unsigned int nr, int memory_order) \
{ \
RTE_ASSERT(nr < size); \
\
- RTE_ATOMIC(uint ## size ## _t) *a_addr = \
- (RTE_ATOMIC(uint ## size ## _t) *)addr; \
+ qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
+ (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
rte_atomic_fetch_or_explicit(a_addr, mask, memory_order); \
}
-#define __RTE_GEN_BIT_ATOMIC_CLEAR(size) \
+#define __RTE_GEN_BIT_ATOMIC_CLEAR(v, qualifier, size) \
__rte_experimental \
static inline void \
- __rte_bit_atomic_clear ## size(uint ## size ## _t *addr, \
- unsigned int nr, int memory_order) \
+ __rte_bit_atomic_ ## v ## clear ## size(qualifier uint ## size ## _t *addr, \
+                                         unsigned int nr, int memory_order) \
{ \
RTE_ASSERT(nr < size); \
\
- RTE_ATOMIC(uint ## size ## _t) *a_addr = \
- (RTE_ATOMIC(uint ## size ## _t) *)addr; \
+ qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
+ (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
rte_atomic_fetch_and_explicit(a_addr, ~mask, memory_order); \
}
-#define __RTE_GEN_BIT_ATOMIC_FLIP(size) \
+#define __RTE_GEN_BIT_ATOMIC_FLIP(v, qualifier, size) \
__rte_experimental \
static inline void \
- __rte_bit_atomic_flip ## size(uint ## size ## _t *addr, \
- unsigned int nr, int memory_order) \
+ __rte_bit_atomic_ ## v ## flip ## size(qualifier uint ## size ## _t *addr, \
+                                        unsigned int nr, int memory_order) \
{ \
RTE_ASSERT(nr < size); \
\
- RTE_ATOMIC(uint ## size ## _t) *a_addr = \
- (RTE_ATOMIC(uint ## size ## _t) *)addr; \
+ qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
+ (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
rte_atomic_fetch_xor_explicit(a_addr, mask, memory_order); \
}
-#define __RTE_GEN_BIT_ATOMIC_ASSIGN(size) \
+#define __RTE_GEN_BIT_ATOMIC_ASSIGN(v, qualifier, size) \
__rte_experimental \
static inline void \
- __rte_bit_atomic_assign ## size(uint ## size ## _t *addr, \
- unsigned int nr, bool value, \
- int memory_order) \
+ __rte_bit_atomic_ ## v ## assign ## size(qualifier uint ## size ## _t *addr, \
+ unsigned int nr, bool value, \
+ int memory_order) \
{ \
if (value) \
- __rte_bit_atomic_set ## size(addr, nr, memory_order); \
+ __rte_bit_atomic_ ## v ## set ## size(addr, nr, memory_order); \
else \
- __rte_bit_atomic_clear ## size(addr, nr, \
- memory_order); \
+ __rte_bit_atomic_ ## v ## clear ## size(addr, nr, \
+                                                 memory_order); \
}
-#define __RTE_GEN_BIT_ATOMIC_TEST_AND_SET(size) \
+#define __RTE_GEN_BIT_ATOMIC_TEST_AND_SET(v, qualifier, size) \
__rte_experimental \
static inline bool \
- __rte_bit_atomic_test_and_set ## size(uint ## size ## _t *addr, \
- unsigned int nr, \
- int memory_order) \
+ __rte_bit_atomic_ ## v ## test_and_set ## size(qualifier uint ## size ## _t *addr, \
+ unsigned int nr, \
+ int memory_order) \
{ \
RTE_ASSERT(nr < size); \
\
- RTE_ATOMIC(uint ## size ## _t) *a_addr = \
- (RTE_ATOMIC(uint ## size ## _t) *)addr; \
+ qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
+ (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
uint ## size ## _t prev; \
\
@@ -587,17 +632,17 @@ __RTE_GEN_BIT_FLIP(, flip,, 64)
return prev & mask; \
}
-#define __RTE_GEN_BIT_ATOMIC_TEST_AND_CLEAR(size) \
+#define __RTE_GEN_BIT_ATOMIC_TEST_AND_CLEAR(v, qualifier, size) \
__rte_experimental \
static inline bool \
- __rte_bit_atomic_test_and_clear ## size(uint ## size ## _t *addr, \
- unsigned int nr, \
- int memory_order) \
+ __rte_bit_atomic_ ## v ## test_and_clear ## size(qualifier uint ## size ## _t *addr, \
+ unsigned int nr, \
+ int memory_order) \
{ \
RTE_ASSERT(nr < size); \
\
- RTE_ATOMIC(uint ## size ## _t) *a_addr = \
- (RTE_ATOMIC(uint ## size ## _t) *)addr; \
+ qualifier RTE_ATOMIC(uint ## size ## _t) *a_addr = \
+ (qualifier RTE_ATOMIC(uint ## size ## _t) *)addr; \
uint ## size ## _t mask = (uint ## size ## _t)1 << nr; \
uint ## size ## _t prev; \
\
@@ -607,34 +652,36 @@ __RTE_GEN_BIT_FLIP(, flip,, 64)
return prev & mask; \
}
-#define __RTE_GEN_BIT_ATOMIC_TEST_AND_ASSIGN(size) \
+#define __RTE_GEN_BIT_ATOMIC_TEST_AND_ASSIGN(v, qualifier, size) \
__rte_experimental \
static inline bool \
- __rte_bit_atomic_test_and_assign ## size(uint ## size ## _t *addr, \
- unsigned int nr, \
- bool value, \
- int memory_order) \
+ __rte_bit_atomic_ ## v ## test_and_assign ## size(qualifier uint ## size ## _t *addr, \
+ unsigned int nr, \
+ bool value, \
+ int memory_order) \
{ \
if (value) \
- return __rte_bit_atomic_test_and_set ## size(addr, nr, \
-                                              memory_order); \
+ return __rte_bit_atomic_ ## v ## test_and_set ## size(addr, nr, memory_order); \
else \
- return __rte_bit_atomic_test_and_clear ## size(addr, nr, \
-                                                memory_order); \
+ return __rte_bit_atomic_ ## v ## test_and_clear ## size(addr, nr, memory_order); \
}
-#define __RTE_GEN_BIT_ATOMIC_OPS(size) \
- __RTE_GEN_BIT_ATOMIC_TEST(size) \
- __RTE_GEN_BIT_ATOMIC_SET(size) \
- __RTE_GEN_BIT_ATOMIC_CLEAR(size) \
- __RTE_GEN_BIT_ATOMIC_ASSIGN(size) \
- __RTE_GEN_BIT_ATOMIC_TEST_AND_SET(size) \
- __RTE_GEN_BIT_ATOMIC_TEST_AND_CLEAR(size) \
- __RTE_GEN_BIT_ATOMIC_TEST_AND_ASSIGN(size) \
- __RTE_GEN_BIT_ATOMIC_FLIP(size)
+#define __RTE_GEN_BIT_ATOMIC_OPS(v, qualifier, size) \
+ __RTE_GEN_BIT_ATOMIC_TEST(v, qualifier, size) \
+ __RTE_GEN_BIT_ATOMIC_SET(v, qualifier, size) \
+ __RTE_GEN_BIT_ATOMIC_CLEAR(v, qualifier, size) \
+ __RTE_GEN_BIT_ATOMIC_ASSIGN(v, qualifier, size) \
+ __RTE_GEN_BIT_ATOMIC_TEST_AND_SET(v, qualifier, size) \
+ __RTE_GEN_BIT_ATOMIC_TEST_AND_CLEAR(v, qualifier, size) \
+ __RTE_GEN_BIT_ATOMIC_TEST_AND_ASSIGN(v, qualifier, size) \
+ __RTE_GEN_BIT_ATOMIC_FLIP(v, qualifier, size)
-__RTE_GEN_BIT_ATOMIC_OPS(32)
-__RTE_GEN_BIT_ATOMIC_OPS(64)
+#define __RTE_GEN_BIT_ATOMIC_OPS_SIZE(size) \
+ __RTE_GEN_BIT_ATOMIC_OPS(,, size) \
+ __RTE_GEN_BIT_ATOMIC_OPS(v_, volatile, size)
+
+__RTE_GEN_BIT_ATOMIC_OPS_SIZE(32)
+__RTE_GEN_BIT_ATOMIC_OPS_SIZE(64)
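
[Editor's note: a minimal usage sketch, calling the internal volatile
variants directly for illustration. The series' public rte_bit_atomic_*()
wrappers, which would normally dispatch to these per-size helpers, are
outside this hunk; the flag word and the chosen memory orders below are
assumptions, not patch content.

#include <stdbool.h>
#include <stdint.h>
#include <rte_bitops.h>

/* Flag word shared with another thread or device; the volatile
 * qualifier is what the new v_-prefixed variants accept without
 * requiring a cast at each call site. */
static volatile uint32_t flags;

static bool
claim_flag(unsigned int nr)
{
	/* Atomically set bit nr; return true if it was already set. */
	return __rte_bit_atomic_v_test_and_set32(&flags, nr,
						 rte_memory_order_acq_rel);
}

static bool
flag_is_set(unsigned int nr)
{
	return __rte_bit_atomic_v_test32(&flags, nr,
					 rte_memory_order_acquire);
}
]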