On 17.10.2023 21:31, Tyler Retzlaff wrote:
Replace the use of gcc builtin __atomic_xxx intrinsics with the
corresponding rte_atomic_xxx optional stdatomic API.
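
For readers new to the rte_ spellings, a minimal sketch of the mapping,
assuming a DPDK build with rte_stdatomic.h on the include path; the
struct and helper below are illustrative only, not part of this patch.
With RTE_ENABLE_STDATOMIC the rte_ names resolve to C11 stdatomic,
otherwise they fall back to the gcc builtins.

    #include <stdint.h>
    #include <rte_stdatomic.h>

    /* Illustrative: RTE_ATOMIC(uint64_t) expands to _Atomic(uint64_t)
     * under RTE_ENABLE_STDATOMIC and to plain uint64_t otherwise.
     */
    struct demo {
            RTE_ATOMIC(uint64_t) len;
    };

    static inline uint64_t
    demo_len(struct demo *d)
    {
            /* was: __atomic_load_n(&d->len, __ATOMIC_RELAXED) */
            return rte_atomic_load_explicit(&d->len, rte_memory_order_relaxed);
    }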

Signed-off-by: Tyler Retzlaff <roret...@linux.microsoft.com>
---
  lib/stack/rte_stack.h            |  2 +-
  lib/stack/rte_stack_lf_c11.h     | 24 ++++++++++++------------
  lib/stack/rte_stack_lf_generic.h | 18 +++++++++---------
  3 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/lib/stack/rte_stack.h b/lib/stack/rte_stack.h
index 921d29a..a379300 100644
--- a/lib/stack/rte_stack.h
+++ b/lib/stack/rte_stack.h
@@ -44,7 +44,7 @@ struct rte_stack_lf_list {
        /** List head */
        struct rte_stack_lf_head head __rte_aligned(16);
        /** List len */
-       uint64_t len;
+       RTE_ATOMIC(uint64_t) len;
  };
/* Structure containing two lock-free LIFO lists: the stack itself and a list
diff --git a/lib/stack/rte_stack_lf_c11.h b/lib/stack/rte_stack_lf_c11.h
index 687a6f6..9cb6998 100644
--- a/lib/stack/rte_stack_lf_c11.h
+++ b/lib/stack/rte_stack_lf_c11.h
@@ -26,8 +26,8 @@
         * elements. If the mempool is near-empty to the point that this is a
         * concern, the user should consider increasing the mempool size.
         */
-       return (unsigned int)__atomic_load_n(&s->stack_lf.used.len,
-                                            __ATOMIC_RELAXED);
+       return (unsigned int)rte_atomic_load_explicit(&s->stack_lf.used.len,
+                                            rte_memory_order_relaxed);
  }
static __rte_always_inline void
@@ -59,14 +59,14 @@
                                (rte_int128_t *)&list->head,
                                (rte_int128_t *)&old_head,
                                (rte_int128_t *)&new_head,
-                               1, __ATOMIC_RELEASE,
-                               __ATOMIC_RELAXED);
+                               1, rte_memory_order_release,
+                               rte_memory_order_relaxed);
        } while (success == 0);
/* Ensure the stack modifications are not reordered with respect
         * to the LIFO len update.
         */
-       __atomic_fetch_add(&list->len, num, __ATOMIC_RELEASE);
+       rte_atomic_fetch_add_explicit(&list->len, num, rte_memory_order_release);
  }
static __rte_always_inline struct rte_stack_lf_elem *
@@ -80,7 +80,7 @@
        int success;
/* Reserve num elements, if available */
-       len = __atomic_load_n(&list->len, __ATOMIC_RELAXED);
+       len = rte_atomic_load_explicit(&list->len, rte_memory_order_relaxed);
while (1) {
                /* Does the list contain enough elements? */
@@ -88,10 +88,10 @@
                        return NULL;
/* len is updated on failure */
-               if (__atomic_compare_exchange_n(&list->len,
+               if (rte_atomic_compare_exchange_weak_explicit(&list->len,
                                                &len, len - num,
-                                               1, __ATOMIC_ACQUIRE,
-                                               __ATOMIC_RELAXED))
+                                               rte_memory_order_acquire,
+                                               rte_memory_order_relaxed))
                        break;
        }
@@ -110,7 +110,7 @@
                 * elements are properly ordered with respect to the head
                 * pointer read.
                 */
-               __atomic_thread_fence(__ATOMIC_ACQUIRE);
+               __atomic_thread_fence(rte_memory_order_acquire);
rte_prefetch0(old_head.top);
@@ -159,8 +159,8 @@
                                (rte_int128_t *)&list->head,
                                (rte_int128_t *)&old_head,
                                (rte_int128_t *)&new_head,
-                               0, __ATOMIC_RELAXED,
-                               __ATOMIC_RELAXED);
+                               0, rte_memory_order_relaxed,
+                               rte_memory_order_relaxed);
        } while (success == 0);
return old_head.top;
diff --git a/lib/stack/rte_stack_lf_generic.h b/lib/stack/rte_stack_lf_generic.h
index 39f7ff3..cc69e4d 100644
--- a/lib/stack/rte_stack_lf_generic.h
+++ b/lib/stack/rte_stack_lf_generic.h
@@ -27,7 +27,7 @@
         * concern, the user should consider increasing the mempool size.
         */
        /* NOTE: review for potential ordering optimization */
-       return __atomic_load_n(&s->stack_lf.used.len, __ATOMIC_SEQ_CST);
+       return rte_atomic_load_explicit(&s->stack_lf.used.len, rte_memory_order_seq_cst);
  }
static __rte_always_inline void
@@ -64,11 +64,11 @@
                                (rte_int128_t *)&list->head,
                                (rte_int128_t *)&old_head,
                                (rte_int128_t *)&new_head,
-                               1, __ATOMIC_RELEASE,
-                               __ATOMIC_RELAXED);
+                               1, rte_memory_order_release,
+                               rte_memory_order_relaxed);
        } while (success == 0);
        /* NOTE: review for potential ordering optimization */
-       __atomic_fetch_add(&list->len, num, __ATOMIC_SEQ_CST);
+       rte_atomic_fetch_add_explicit(&list->len, num, rte_memory_order_seq_cst);
  }
static __rte_always_inline struct rte_stack_lf_elem *
@@ -83,15 +83,15 @@
        /* Reserve num elements, if available */
        while (1) {
                /* NOTE: review for potential ordering optimization */
-               uint64_t len = __atomic_load_n(&list->len, __ATOMIC_SEQ_CST);
+               uint64_t len = rte_atomic_load_explicit(&list->len, rte_memory_order_seq_cst);
/* Does the list contain enough elements? */
                if (unlikely(len < num))
                        return NULL;
/* NOTE: review for potential ordering optimization */
-               if (__atomic_compare_exchange_n(&list->len, &len, len - num,
-                               0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
+               if (rte_atomic_compare_exchange_strong_explicit(&list->len, &len, len - num,
+                               rte_memory_order_seq_cst, rte_memory_order_seq_cst))
                        break;
        }
@@ -143,8 +143,8 @@
                                (rte_int128_t *)&list->head,
                                (rte_int128_t *)&old_head,
                                (rte_int128_t *)&new_head,
-                               1, __ATOMIC_RELEASE,
-                               __ATOMIC_RELAXED);
+                               1, rte_memory_order_release,
+                               rte_memory_order_relaxed);
        } while (success == 0);
return old_head.top;
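
One note for reviewers comparing the two CAS spellings: the gcc builtin
__atomic_compare_exchange_n() takes a weak flag as an argument (the 1 in
the c11 header, the 0 in the generic one), while the rte_ API encodes it
in the function name, hence _weak_ above and _strong_ in the generic
header. A minimal sketch of the reservation pattern under the new names,
assuming a DPDK build; publish() and reserve() are hypothetical helpers,
not part of this patch:

    #include <stdint.h>
    #include <rte_stdatomic.h>

    /* Push side: make 'num' new elements visible. The release order
     * pairs with the acquire CAS in reserve() below, as in
     * __rte_stack_lf_push_elems().
     */
    static inline void
    publish(RTE_ATOMIC(uint64_t) *len_p, uint64_t num)
    {
            rte_atomic_fetch_add_explicit(len_p, num,
                            rte_memory_order_release);
    }

    /* Pop side: reserve 'num' elements, as in __rte_stack_lf_pop_elems().
     * A weak CAS may fail spuriously, which is harmless in a retry loop;
     * on failure 'len' is reloaded with the value actually observed.
     */
    static inline int
    reserve(RTE_ATOMIC(uint64_t) *len_p, uint64_t num)
    {
            uint64_t len = rte_atomic_load_explicit(len_p,
                            rte_memory_order_relaxed);

            do {
                    if (len < num)
                            return -1; /* not enough elements */
            } while (!rte_atomic_compare_exchange_weak_explicit(len_p, &len,
                            len - num, rte_memory_order_acquire,
                            rte_memory_order_relaxed));

            return 0;
    }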

Acked-by: Konstantin Ananyev <konstantin.v.anan...@yandex.ru>
