Replace the use of gcc builtin __atomic_xxx intrinsics with the
corresponding rte_atomic_xxx optional RTE stdatomic API.

Signed-off-by: Tyler Retzlaff <roret...@linux.microsoft.com>
Acked-by: Stephen Hemminger <step...@networkplumber.org>
---
 app/test-bbdev/test_bbdev_perf.c | 183 +++++++++++++++++++++++----------------
 1 file changed, 110 insertions(+), 73 deletions(-)

diff --git a/app/test-bbdev/test_bbdev_perf.c b/app/test-bbdev/test_bbdev_perf.c
index dcce00a..9694ed3 100644
--- a/app/test-bbdev/test_bbdev_perf.c
+++ b/app/test-bbdev/test_bbdev_perf.c
@@ -144,7 +144,7 @@ struct test_op_params {
        uint16_t num_to_process;
        uint16_t num_lcores;
        int vector_mask;
-       uint16_t sync;
+       RTE_ATOMIC(uint16_t) sync;
        struct test_buffers q_bufs[RTE_MAX_NUMA_NODES][MAX_QUEUES];
 };
 
@@ -159,9 +159,9 @@ struct thread_params {
        uint8_t iter_count;
        double iter_average;
        double bler;
-       uint16_t nb_dequeued;
-       int16_t processing_status;
-       uint16_t burst_sz;
+       RTE_ATOMIC(uint16_t) nb_dequeued;
+       RTE_ATOMIC(int16_t) processing_status;
+       RTE_ATOMIC(uint16_t) burst_sz;
        struct test_op_params *op_params;
        struct rte_bbdev_dec_op *dec_ops[MAX_BURST];
        struct rte_bbdev_enc_op *enc_ops[MAX_BURST];
@@ -3195,56 +3195,64 @@ typedef int (test_case_function)(struct active_device 
*ad,
        }
 
        if (unlikely(event != RTE_BBDEV_EVENT_DEQUEUE)) {
-               __atomic_store_n(&tp->processing_status, TEST_FAILED, 
__ATOMIC_RELAXED);
+               rte_atomic_store_explicit(&tp->processing_status, TEST_FAILED,
+                   rte_memory_order_relaxed);
                printf(
                        "Dequeue interrupt handler called for incorrect 
event!\n");
                return;
        }
 
-       burst_sz = __atomic_load_n(&tp->burst_sz, __ATOMIC_RELAXED);
+       burst_sz = rte_atomic_load_explicit(&tp->burst_sz, 
rte_memory_order_relaxed);
        num_ops = tp->op_params->num_to_process;
 
        if (test_vector.op_type == RTE_BBDEV_OP_TURBO_DEC)
                deq = rte_bbdev_dequeue_dec_ops(dev_id, queue_id,
                                &tp->dec_ops[
-                                       __atomic_load_n(&tp->nb_dequeued, 
__ATOMIC_RELAXED)],
+                                       
rte_atomic_load_explicit(&tp->nb_dequeued,
+                                           rte_memory_order_relaxed)],
                                burst_sz);
        else if (test_vector.op_type == RTE_BBDEV_OP_LDPC_DEC)
                deq = rte_bbdev_dequeue_ldpc_dec_ops(dev_id, queue_id,
                                &tp->dec_ops[
-                                       __atomic_load_n(&tp->nb_dequeued, 
__ATOMIC_RELAXED)],
+                                       
rte_atomic_load_explicit(&tp->nb_dequeued,
+                                           rte_memory_order_relaxed)],
                                burst_sz);
        else if (test_vector.op_type == RTE_BBDEV_OP_LDPC_ENC)
                deq = rte_bbdev_dequeue_ldpc_enc_ops(dev_id, queue_id,
                                &tp->enc_ops[
-                                       __atomic_load_n(&tp->nb_dequeued, 
__ATOMIC_RELAXED)],
+                                       
rte_atomic_load_explicit(&tp->nb_dequeued,
+                                           rte_memory_order_relaxed)],
                                burst_sz);
        else if (test_vector.op_type == RTE_BBDEV_OP_FFT)
                deq = rte_bbdev_dequeue_fft_ops(dev_id, queue_id,
                                &tp->fft_ops[
-                                       __atomic_load_n(&tp->nb_dequeued, 
__ATOMIC_RELAXED)],
+                                       
rte_atomic_load_explicit(&tp->nb_dequeued,
+                                           rte_memory_order_relaxed)],
                                burst_sz);
        else if (test_vector.op_type == RTE_BBDEV_OP_MLDTS)
                deq = rte_bbdev_dequeue_mldts_ops(dev_id, queue_id,
                                &tp->mldts_ops[
-                                       __atomic_load_n(&tp->nb_dequeued, 
__ATOMIC_RELAXED)],
+                                       
rte_atomic_load_explicit(&tp->nb_dequeued,
+                                           rte_memory_order_relaxed)],
                                burst_sz);
        else /*RTE_BBDEV_OP_TURBO_ENC*/
                deq = rte_bbdev_dequeue_enc_ops(dev_id, queue_id,
                                &tp->enc_ops[
-                                       __atomic_load_n(&tp->nb_dequeued, 
__ATOMIC_RELAXED)],
+                                       
rte_atomic_load_explicit(&tp->nb_dequeued,
+                                           rte_memory_order_relaxed)],
                                burst_sz);
 
        if (deq < burst_sz) {
                printf(
                        "After receiving the interrupt all operations should be 
dequeued. Expected: %u, got: %u\n",
                        burst_sz, deq);
-               __atomic_store_n(&tp->processing_status, TEST_FAILED, 
__ATOMIC_RELAXED);
+               rte_atomic_store_explicit(&tp->processing_status, TEST_FAILED,
+                   rte_memory_order_relaxed);
                return;
        }
 
-       if (__atomic_load_n(&tp->nb_dequeued, __ATOMIC_RELAXED) + deq < 
num_ops) {
-               __atomic_fetch_add(&tp->nb_dequeued, deq, __ATOMIC_RELAXED);
+       if (rte_atomic_load_explicit(&tp->nb_dequeued, 
rte_memory_order_relaxed) + deq < num_ops) {
+               rte_atomic_fetch_add_explicit(&tp->nb_dequeued, deq, 
rte_memory_order_relaxed);
                return;
        }
 
@@ -3288,7 +3296,8 @@ typedef int (test_case_function)(struct active_device *ad,
 
        if (ret) {
                printf("Buffers validation failed\n");
-               __atomic_store_n(&tp->processing_status, TEST_FAILED, 
__ATOMIC_RELAXED);
+               rte_atomic_store_explicit(&tp->processing_status, TEST_FAILED,
+                   rte_memory_order_relaxed);
        }
 
        switch (test_vector.op_type) {
@@ -3315,7 +3324,8 @@ typedef int (test_case_function)(struct active_device *ad,
                break;
        default:
                printf("Unknown op type: %d\n", test_vector.op_type);
-               __atomic_store_n(&tp->processing_status, TEST_FAILED, 
__ATOMIC_RELAXED);
+               rte_atomic_store_explicit(&tp->processing_status, TEST_FAILED,
+                   rte_memory_order_relaxed);
                return;
        }
 
@@ -3324,7 +3334,7 @@ typedef int (test_case_function)(struct active_device *ad,
        tp->mbps += (((double)(num_ops * tb_len_bits)) / 1000000.0) /
                        ((double)total_time / (double)rte_get_tsc_hz());
 
-       __atomic_fetch_add(&tp->nb_dequeued, deq, __ATOMIC_RELAXED);
+       rte_atomic_fetch_add_explicit(&tp->nb_dequeued, deq, 
rte_memory_order_relaxed);
 }
 
 static int
@@ -3362,10 +3372,11 @@ typedef int (test_case_function)(struct active_device 
*ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       __atomic_store_n(&tp->processing_status, 0, __ATOMIC_RELAXED);
-       __atomic_store_n(&tp->nb_dequeued, 0, __ATOMIC_RELAXED);
+       rte_atomic_store_explicit(&tp->processing_status, 0, 
rte_memory_order_relaxed);
+       rte_atomic_store_explicit(&tp->nb_dequeued, 0, 
rte_memory_order_relaxed);
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_dec_op_alloc_bulk(tp->op_params->mp, ops,
                                num_to_process);
@@ -3415,15 +3426,17 @@ typedef int (test_case_function)(struct active_device 
*ad,
                         * the number of operations is not a multiple of
                         * burst size.
                         */
-                       __atomic_store_n(&tp->burst_sz, num_to_enq, 
__ATOMIC_RELAXED);
+                       rte_atomic_store_explicit(&tp->burst_sz, num_to_enq,
+                           rte_memory_order_relaxed);
 
                        /* Wait until processing of previous batch is
                         * completed
                         */
-                       rte_wait_until_equal_16(&tp->nb_dequeued, enqueued, 
__ATOMIC_RELAXED);
+                       rte_wait_until_equal_16((uint16_t 
*)(uintptr_t)&tp->nb_dequeued, enqueued,
+                           rte_memory_order_relaxed);
                }
                if (j != TEST_REPETITIONS - 1)
-                       __atomic_store_n(&tp->nb_dequeued, 0, __ATOMIC_RELAXED);
+                       rte_atomic_store_explicit(&tp->nb_dequeued, 0, 
rte_memory_order_relaxed);
        }
 
        return TEST_SUCCESS;
@@ -3459,10 +3472,11 @@ typedef int (test_case_function)(struct active_device 
*ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       __atomic_store_n(&tp->processing_status, 0, __ATOMIC_RELAXED);
-       __atomic_store_n(&tp->nb_dequeued, 0, __ATOMIC_RELAXED);
+       rte_atomic_store_explicit(&tp->processing_status, 0, 
rte_memory_order_relaxed);
+       rte_atomic_store_explicit(&tp->nb_dequeued, 0, 
rte_memory_order_relaxed);
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_dec_op_alloc_bulk(tp->op_params->mp, ops,
                                num_to_process);
@@ -3506,15 +3520,17 @@ typedef int (test_case_function)(struct active_device 
*ad,
                         * the number of operations is not a multiple of
                         * burst size.
                         */
-                       __atomic_store_n(&tp->burst_sz, num_to_enq, 
__ATOMIC_RELAXED);
+                       rte_atomic_store_explicit(&tp->burst_sz, num_to_enq,
+                           rte_memory_order_relaxed);
 
                        /* Wait until processing of previous batch is
                         * completed
                         */
-                       rte_wait_until_equal_16(&tp->nb_dequeued, enqueued, 
__ATOMIC_RELAXED);
+                       rte_wait_until_equal_16((uint16_t 
*)(uintptr_t)&tp->nb_dequeued, enqueued,
+                           rte_memory_order_relaxed);
                }
                if (j != TEST_REPETITIONS - 1)
-                       __atomic_store_n(&tp->nb_dequeued, 0, __ATOMIC_RELAXED);
+                       rte_atomic_store_explicit(&tp->nb_dequeued, 0, 
rte_memory_order_relaxed);
        }
 
        return TEST_SUCCESS;
@@ -3549,10 +3565,11 @@ typedef int (test_case_function)(struct active_device 
*ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       __atomic_store_n(&tp->processing_status, 0, __ATOMIC_RELAXED);
-       __atomic_store_n(&tp->nb_dequeued, 0, __ATOMIC_RELAXED);
+       rte_atomic_store_explicit(&tp->processing_status, 0, 
rte_memory_order_relaxed);
+       rte_atomic_store_explicit(&tp->nb_dequeued, 0, 
rte_memory_order_relaxed);
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_enc_op_alloc_bulk(tp->op_params->mp, ops,
                        num_to_process);
@@ -3592,15 +3609,17 @@ typedef int (test_case_function)(struct active_device 
*ad,
                         * the number of operations is not a multiple of
                         * burst size.
                         */
-                       __atomic_store_n(&tp->burst_sz, num_to_enq, 
__ATOMIC_RELAXED);
+                       rte_atomic_store_explicit(&tp->burst_sz, num_to_enq,
+                           rte_memory_order_relaxed);
 
                        /* Wait until processing of previous batch is
                         * completed
                         */
-                       rte_wait_until_equal_16(&tp->nb_dequeued, enqueued, 
__ATOMIC_RELAXED);
+                       rte_wait_until_equal_16((uint16_t 
*)(uintptr_t)&tp->nb_dequeued, enqueued,
+                           rte_memory_order_relaxed);
                }
                if (j != TEST_REPETITIONS - 1)
-                       __atomic_store_n(&tp->nb_dequeued, 0, __ATOMIC_RELAXED);
+                       rte_atomic_store_explicit(&tp->nb_dequeued, 0, 
rte_memory_order_relaxed);
        }
 
        return TEST_SUCCESS;
@@ -3636,10 +3655,11 @@ typedef int (test_case_function)(struct active_device 
*ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       __atomic_store_n(&tp->processing_status, 0, __ATOMIC_RELAXED);
-       __atomic_store_n(&tp->nb_dequeued, 0, __ATOMIC_RELAXED);
+       rte_atomic_store_explicit(&tp->processing_status, 0, 
rte_memory_order_relaxed);
+       rte_atomic_store_explicit(&tp->nb_dequeued, 0, 
rte_memory_order_relaxed);
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_enc_op_alloc_bulk(tp->op_params->mp, ops,
                        num_to_process);
@@ -3681,15 +3701,17 @@ typedef int (test_case_function)(struct active_device 
*ad,
                         * the number of operations is not a multiple of
                         * burst size.
                         */
-                       __atomic_store_n(&tp->burst_sz, num_to_enq, 
__ATOMIC_RELAXED);
+                       rte_atomic_store_explicit(&tp->burst_sz, num_to_enq,
+                           rte_memory_order_relaxed);
 
                        /* Wait until processing of previous batch is
                         * completed
                         */
-                       rte_wait_until_equal_16(&tp->nb_dequeued, enqueued, 
__ATOMIC_RELAXED);
+                       rte_wait_until_equal_16((uint16_t 
*)(uintptr_t)&tp->nb_dequeued, enqueued,
+                           rte_memory_order_relaxed);
                }
                if (j != TEST_REPETITIONS - 1)
-                       __atomic_store_n(&tp->nb_dequeued, 0, __ATOMIC_RELAXED);
+                       rte_atomic_store_explicit(&tp->nb_dequeued, 0, 
rte_memory_order_relaxed);
        }
 
        return TEST_SUCCESS;
@@ -3725,10 +3747,11 @@ typedef int (test_case_function)(struct active_device 
*ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       __atomic_store_n(&tp->processing_status, 0, __ATOMIC_RELAXED);
-       __atomic_store_n(&tp->nb_dequeued, 0, __ATOMIC_RELAXED);
+       rte_atomic_store_explicit(&tp->processing_status, 0, 
rte_memory_order_relaxed);
+       rte_atomic_store_explicit(&tp->nb_dequeued, 0, 
rte_memory_order_relaxed);
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_fft_op_alloc_bulk(tp->op_params->mp, ops,
                        num_to_process);
@@ -3769,15 +3792,17 @@ typedef int (test_case_function)(struct active_device 
*ad,
                         * the number of operations is not a multiple of
                         * burst size.
                         */
-                       __atomic_store_n(&tp->burst_sz, num_to_enq, 
__ATOMIC_RELAXED);
+                       rte_atomic_store_explicit(&tp->burst_sz, num_to_enq,
+                           rte_memory_order_relaxed);
 
                        /* Wait until processing of previous batch is
                         * completed
                         */
-                       rte_wait_until_equal_16(&tp->nb_dequeued, enqueued, 
__ATOMIC_RELAXED);
+                       rte_wait_until_equal_16((uint16_t 
*)(uintptr_t)&tp->nb_dequeued, enqueued,
+                           rte_memory_order_relaxed);
                }
                if (j != TEST_REPETITIONS - 1)
-                       __atomic_store_n(&tp->nb_dequeued, 0, __ATOMIC_RELAXED);
+                       rte_atomic_store_explicit(&tp->nb_dequeued, 0, 
rte_memory_order_relaxed);
        }
 
        return TEST_SUCCESS;
@@ -3811,10 +3836,11 @@ typedef int (test_case_function)(struct active_device 
*ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       __atomic_store_n(&tp->processing_status, 0, __ATOMIC_RELAXED);
-       __atomic_store_n(&tp->nb_dequeued, 0, __ATOMIC_RELAXED);
+       rte_atomic_store_explicit(&tp->processing_status, 0, 
rte_memory_order_relaxed);
+       rte_atomic_store_explicit(&tp->nb_dequeued, 0, 
rte_memory_order_relaxed);
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_mldts_op_alloc_bulk(tp->op_params->mp, ops, 
num_to_process);
        TEST_ASSERT_SUCCESS(ret, "Allocation failed for %d ops", 
num_to_process);
@@ -3851,15 +3877,17 @@ typedef int (test_case_function)(struct active_device 
*ad,
                         * the number of operations is not a multiple of
                         * burst size.
                         */
-                       __atomic_store_n(&tp->burst_sz, num_to_enq, 
__ATOMIC_RELAXED);
+                       rte_atomic_store_explicit(&tp->burst_sz, num_to_enq,
+                           rte_memory_order_relaxed);
 
                        /* Wait until processing of previous batch is
                         * completed
                         */
-                       rte_wait_until_equal_16(&tp->nb_dequeued, enqueued, 
__ATOMIC_RELAXED);
+                       rte_wait_until_equal_16((uint16_t 
*)(uintptr_t)&tp->nb_dequeued, enqueued,
+                           rte_memory_order_relaxed);
                }
                if (j != TEST_REPETITIONS - 1)
-                       __atomic_store_n(&tp->nb_dequeued, 0, __ATOMIC_RELAXED);
+                       rte_atomic_store_explicit(&tp->nb_dequeued, 0, 
rte_memory_order_relaxed);
        }
 
        return TEST_SUCCESS;
@@ -3894,7 +3922,8 @@ typedef int (test_case_function)(struct active_device *ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_dec_op_alloc_bulk(tp->op_params->mp, ops_enq, num_ops);
        TEST_ASSERT_SUCCESS(ret, "Allocation failed for %d ops", num_ops);
@@ -4013,7 +4042,8 @@ typedef int (test_case_function)(struct active_device *ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_dec_op_alloc_bulk(tp->op_params->mp, ops_enq, num_ops);
        TEST_ASSERT_SUCCESS(ret, "Allocation failed for %d ops", num_ops);
@@ -4148,7 +4178,8 @@ typedef int (test_case_function)(struct active_device *ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_dec_op_alloc_bulk(tp->op_params->mp, ops_enq, num_ops);
        TEST_ASSERT_SUCCESS(ret, "Allocation failed for %d ops", num_ops);
@@ -4271,7 +4302,8 @@ typedef int (test_case_function)(struct active_device *ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_dec_op_alloc_bulk(tp->op_params->mp, ops_enq, num_ops);
        TEST_ASSERT_SUCCESS(ret, "Allocation failed for %d ops", num_ops);
@@ -4402,7 +4434,8 @@ typedef int (test_case_function)(struct active_device *ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_enc_op_alloc_bulk(tp->op_params->mp, ops_enq,
                        num_ops);
@@ -4503,7 +4536,8 @@ typedef int (test_case_function)(struct active_device *ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_enc_op_alloc_bulk(tp->op_params->mp, ops_enq,
                        num_ops);
@@ -4604,7 +4638,8 @@ typedef int (test_case_function)(struct active_device *ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_fft_op_alloc_bulk(tp->op_params->mp, ops_enq, num_ops);
        TEST_ASSERT_SUCCESS(ret, "Allocation failed for %d ops", num_ops);
@@ -4702,7 +4737,8 @@ typedef int (test_case_function)(struct active_device *ad,
 
        bufs = &tp->op_params->q_bufs[GET_SOCKET(info.socket_id)][queue_id];
 
-       rte_wait_until_equal_16(&tp->op_params->sync, SYNC_START, 
__ATOMIC_RELAXED);
+       rte_wait_until_equal_16((uint16_t *)(uintptr_t)&tp->op_params->sync, 
SYNC_START,
+           rte_memory_order_relaxed);
 
        ret = rte_bbdev_mldts_op_alloc_bulk(tp->op_params->mp, ops_enq, 
num_ops);
        TEST_ASSERT_SUCCESS(ret, "Allocation failed for %d ops", num_ops);
@@ -4898,7 +4934,7 @@ typedef int (test_case_function)(struct active_device *ad,
        else
                return TEST_SKIPPED;
 
-       __atomic_store_n(&op_params->sync, SYNC_WAIT, __ATOMIC_RELAXED);
+       rte_atomic_store_explicit(&op_params->sync, SYNC_WAIT, 
rte_memory_order_relaxed);
 
        /* Main core is set at first entry */
        t_params[0].dev_id = ad->dev_id;
@@ -4921,7 +4957,7 @@ typedef int (test_case_function)(struct active_device *ad,
                                &t_params[used_cores++], lcore_id);
        }
 
-       __atomic_store_n(&op_params->sync, SYNC_START, __ATOMIC_RELAXED);
+       rte_atomic_store_explicit(&op_params->sync, SYNC_START, 
rte_memory_order_relaxed);
        ret = bler_function(&t_params[0]);
 
        /* Main core is always used */
@@ -5024,7 +5060,7 @@ typedef int (test_case_function)(struct active_device *ad,
                        throughput_function = throughput_pmd_lcore_enc;
        }
 
-       __atomic_store_n(&op_params->sync, SYNC_WAIT, __ATOMIC_RELAXED);
+       rte_atomic_store_explicit(&op_params->sync, SYNC_WAIT, 
rte_memory_order_relaxed);
 
        /* Main core is set at first entry */
        t_params[0].dev_id = ad->dev_id;
@@ -5047,7 +5083,7 @@ typedef int (test_case_function)(struct active_device *ad,
                                &t_params[used_cores++], lcore_id);
        }
 
-       __atomic_store_n(&op_params->sync, SYNC_START, __ATOMIC_RELAXED);
+       rte_atomic_store_explicit(&op_params->sync, SYNC_START, 
rte_memory_order_relaxed);
        ret = throughput_function(&t_params[0]);
 
        /* Main core is always used */
@@ -5077,29 +5113,30 @@ typedef int (test_case_function)(struct active_device 
*ad,
         * Wait for main lcore operations.
         */
        tp = &t_params[0];
-       while ((__atomic_load_n(&tp->nb_dequeued, __ATOMIC_RELAXED) <
+       while ((rte_atomic_load_explicit(&tp->nb_dequeued, 
rte_memory_order_relaxed) <
                op_params->num_to_process) &&
-               (__atomic_load_n(&tp->processing_status, __ATOMIC_RELAXED) !=
+               (rte_atomic_load_explicit(&tp->processing_status, 
rte_memory_order_relaxed) !=
                TEST_FAILED))
                rte_pause();
 
        tp->ops_per_sec /= TEST_REPETITIONS;
        tp->mbps /= TEST_REPETITIONS;
-       ret |= (int)__atomic_load_n(&tp->processing_status, __ATOMIC_RELAXED);
+       ret |= (int)rte_atomic_load_explicit(&tp->processing_status, 
rte_memory_order_relaxed);
 
        /* Wait for worker lcores operations */
        for (used_cores = 1; used_cores < num_lcores; used_cores++) {
                tp = &t_params[used_cores];
 
-               while ((__atomic_load_n(&tp->nb_dequeued, __ATOMIC_RELAXED) <
+               while ((rte_atomic_load_explicit(&tp->nb_dequeued, 
rte_memory_order_relaxed) <
                        op_params->num_to_process) &&
-                       (__atomic_load_n(&tp->processing_status, 
__ATOMIC_RELAXED) !=
-                       TEST_FAILED))
+                       (rte_atomic_load_explicit(&tp->processing_status,
+                           rte_memory_order_relaxed) != TEST_FAILED))
                        rte_pause();
 
                tp->ops_per_sec /= TEST_REPETITIONS;
                tp->mbps /= TEST_REPETITIONS;
-               ret |= (int)__atomic_load_n(&tp->processing_status, 
__ATOMIC_RELAXED);
+               ret |= (int)rte_atomic_load_explicit(&tp->processing_status,
+                   rte_memory_order_relaxed);
        }
 
        /* Print throughput if test passed */
-- 
1.8.3.1

Reply via email to