#include <rte_stdatomic.h>

static inline void rte_mb(void);

static inline void rte_wmb(void);

static inline void rte_rmb(void);
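These are the generic barrier declarations: rte_mb() is a full memory fence, rte_wmb() orders store operations, and rte_rmb() orders load operations; each is implemented per architecture. A minimal usage sketch follows, assuming a DPDK build environment; the publish()/consume() helpers and both variables are illustrative, not part of the header.

#include <stdint.h>
#include <rte_atomic.h>

static uint32_t payload;            /* data written by the producer */
static volatile uint32_t ready;     /* flag polled by the consumer */

/* Illustrative producer: the write barrier keeps the payload store
 * ahead of the flag store. */
static void publish(uint32_t value)
{
        payload = value;
        rte_wmb();
        ready = 1;
}

/* Illustrative consumer: the read barrier keeps the flag load ahead
 * of the payload load. */
static int consume(uint32_t *out)
{
        if (ready == 0)
                return -1;
        rte_rmb();
        *out = payload;
        return 0;
}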
#ifdef RTE_TOOLCHAIN_MSVC
#define rte_compiler_barrier() _ReadWriteBarrier()
#else
#define rte_compiler_barrier() do {             \
        asm volatile ("" : : : "memory");       \
} while (0)
#endif

/* The legacy rte_atomicNN_* API below is not exposed under MSVC
 * (it sits behind #ifndef RTE_TOOLCHAIN_MSVC in the header). */

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
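rte_atomic16_cmpset() returns non-zero when it finds *dst equal to exp and atomically replaces it with src. A minimal sketch of a single compare-and-set attempt; try_claim() and the bit layout are assumptions made for illustration.

#include <stdint.h>
#include <rte_atomic.h>

/* Illustrative: claim ownership by setting the high bit, failing if
 * another thread already holds it or wins the race. */
static int try_claim(volatile uint16_t *flags)
{
        uint16_t old = *flags;

        if (old & 0x8000)
                return 0;
        /* Non-zero return means *flags still held 'old' and was
         * atomically replaced by 'old | 0x8000'. */
        return rte_atomic16_cmpset(flags, old, (uint16_t)(old | 0x8000));
}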
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
        return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
}
#endif

#define RTE_ATOMIC16_INIT(val) { (val) }

static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
        return v->cnt;
}

static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
        rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
                rte_memory_order_seq_cst);
}

static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
        rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
                rte_memory_order_seq_cst);
}
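The rte_atomic16_t counter wraps an int16_t cnt field, and RTE_ATOMIC16_INIT provides a static initializer. A small usage sketch, assuming a DPDK build environment; the in_flight counter and helper names are illustrative.

#include <rte_atomic.h>

static rte_atomic16_t in_flight = RTE_ATOMIC16_INIT(0);

static void requests_started(int16_t n)
{
        rte_atomic16_add(&in_flight, n);
}

static void request_finished(void)
{
        rte_atomic16_sub(&in_flight, 1);
}

static int16_t requests_pending(void)
{
        return rte_atomic16_read(&in_flight);
}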
#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
        rte_atomic16_add(v, 1);
}
#endif

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
        rte_atomic16_sub(v, 1);
}
#endif

static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
        return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
                rte_memory_order_seq_cst) + inc;
}

static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
        return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
                rte_memory_order_seq_cst) - dec;
}

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
        return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
                rte_memory_order_seq_cst) + 1 == 0;
}
#endif

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
        return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
                rte_memory_order_seq_cst) - 1 == 0;
}
#endif
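rte_atomic16_dec_and_test() returns 1 only when the decrement brings the counter to zero, which makes it the natural primitive for reference counting. A hedged sketch; struct object and object_put() are invented for illustration.

#include <stdlib.h>
#include <rte_atomic.h>

struct object {
        rte_atomic16_t refcnt;
        /* ... payload ... */
};

static void object_put(struct object *obj)
{
        /* Exactly one caller sees the count reach zero and frees. */
        if (rte_atomic16_dec_and_test(&obj->refcnt))
                free(obj);
}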
#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_test_and_set(rte_atomic16_t *v)
{
        return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
        return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
}
#endif

#define RTE_ATOMIC32_INIT(val) { (val) }

static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
        return v->cnt;
}

static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
        rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
                rte_memory_order_seq_cst);
}

static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
        rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
                rte_memory_order_seq_cst);
}
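rte_atomic32_exchange() stores the new value and returns the previous one in a single sequentially consistent operation. A minimal sketch; swap_seqno() and latest_seqno are illustrative names, not part of the header.

#include <stdint.h>
#include <rte_atomic.h>

static volatile uint32_t latest_seqno;

/* Illustrative: publish a new sequence number and recover the old one. */
static uint32_t swap_seqno(uint32_t next)
{
        return rte_atomic32_exchange(&latest_seqno, next);
}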
#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
        rte_atomic32_add(v, 1);
}
#endif

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
        rte_atomic32_sub(v, 1);
}
#endif

static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
        return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
                rte_memory_order_seq_cst) + inc;
}

static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
        return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
                rte_memory_order_seq_cst) - dec;
}

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
        return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
                rte_memory_order_seq_cst) + 1 == 0;
}
#endif

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
        return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
                rte_memory_order_seq_cst) - 1 == 0;
}
#endif
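Because rte_atomic32_add_return() hands back the post-addition value, each concurrent caller observes a distinct result. A sketch of a ticket dispenser built on it; take_ticket() and next_ticket are assumed names.

#include <rte_atomic.h>

static rte_atomic32_t next_ticket = RTE_ATOMIC32_INIT(0);

static int32_t take_ticket(void)
{
        /* Each caller gets a unique, monotonically increasing ticket. */
        return rte_atomic32_add_return(&next_ticket, 1);
}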
#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_test_and_set(rte_atomic32_t *v)
{
        return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
        return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
}
#endif

#define RTE_ATOMIC64_INIT(val) { (val) }

/* On targets without native 64-bit loads and stores,
 * rte_atomic64_init(), rte_atomic64_read() and rte_atomic64_set() all
 * fall back to the same compare-and-set loop; the read looks like this: */
#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
        return v->cnt;
#else
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                /* Replace the value by itself to read it atomically. */
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, tmp);
        }
        return tmp;
#endif
}
#endif

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
        rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
                rte_memory_order_seq_cst);
}
#endif

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
        rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
                rte_memory_order_seq_cst);
}
#endif
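The 64-bit counter is the usual choice for statistics that may overflow 32 bits. A usage sketch, assuming a DPDK build environment; rx_bytes and the helper names are illustrative. On 32-bit targets the calls transparently use the cmpset loop shown above.

#include <stdint.h>
#include <rte_atomic.h>

static rte_atomic64_t rx_bytes = RTE_ATOMIC64_INIT(0);

static void account_burst(uint64_t nb_bytes)
{
        rte_atomic64_add(&rx_bytes, (int64_t)nb_bytes);
}

static int64_t rx_bytes_snapshot(void)
{
        return rte_atomic64_read(&rx_bytes);
}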
#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
        rte_atomic64_add(v, 1);
}
#endif

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
        rte_atomic64_sub(v, 1);
}
#endif

static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
        return rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
                rte_memory_order_seq_cst) + inc;
}
#endif

static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
        return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
                rte_memory_order_seq_cst) - dec;
}
#endif
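rte_atomic64_sub_return() reports the balance after the subtraction, so a negative result tells the caller the reservation failed. A hedged sketch of a credit pool; tx_credits, borrow_credits() and the initial budget are assumptions for illustration.

#include <rte_atomic.h>

static rte_atomic64_t tx_credits = RTE_ATOMIC64_INIT(1024);

static int borrow_credits(int64_t n)
{
        if (rte_atomic64_sub_return(&tx_credits, n) < 0) {
                /* Overdrew the pool: return the credits and report failure. */
                rte_atomic64_add(&tx_credits, n);
                return -1;
        }
        return 0;
}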
#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_add_return(v, 1) == 0;
}
#endif

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_test_and_set(rte_atomic64_t *v)
{
        return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_clear(rte_atomic64_t *v)
{
        rte_atomic64_set(v, 0);
}
#endif

/* 128-bit integer structure. */
typedef struct __rte_aligned(16) rte_int128_t {
        union {
                uint64_t val[2];
#ifndef RTE_TOOLCHAIN_MSVC
                __extension__ __int128 int128;
#endif
        };
} rte_int128_t;

static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
                           rte_int128_t *exp,
                           const rte_int128_t *src,
                           unsigned int weak,
                           int success,
                           int failure);
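rte_atomic128_cmp_exchange() performs a 128-bit compare-and-swap on platforms that support it (for example x86-64 and aarch64); on failure it refreshes *exp with the current contents and returns zero. A sketch of the common tagged-value pattern used to defeat ABA; bump_tagged() and the use of the val[] halves are illustrative assumptions.

#include <stdint.h>
#include <rte_atomic.h>

static int bump_tagged(rte_int128_t *slot, uint64_t new_val)
{
        rte_int128_t expected = *slot;  /* snapshot; may already be stale */
        rte_int128_t desired;

        desired.val[0] = new_val;
        desired.val[1] = expected.val[1] + 1;   /* monotonic tag */

        /* Strong (non-weak) exchange with seq-cst success/failure
         * orderings; returns non-zero on success. */
        return rte_atomic128_cmp_exchange(slot, &expected, &desired, 0,
                rte_memory_order_seq_cst, rte_memory_order_seq_cst);
}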
Memory barriers and fences:
static void rte_wmb(void)
static void rte_rmb(void)
static void rte_smp_mb(void)
static void rte_smp_wmb(void)
static void rte_smp_rmb(void)
static void rte_io_mb(void)
static void rte_io_wmb(void)
static void rte_io_rmb(void)
static void rte_atomic_thread_fence(rte_memory_order memorder)

16-bit atomic operations:
static int rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
static uint16_t rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
static void rte_atomic16_init(rte_atomic16_t *v)
static int16_t rte_atomic16_read(const rte_atomic16_t *v)
static void rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
static void rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
static void rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
static void rte_atomic16_inc(rte_atomic16_t *v)
static void rte_atomic16_dec(rte_atomic16_t *v)
static int16_t rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
static int16_t rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
static int rte_atomic16_inc_and_test(rte_atomic16_t *v)
static int rte_atomic16_dec_and_test(rte_atomic16_t *v)
static int rte_atomic16_test_and_set(rte_atomic16_t *v)
static void rte_atomic16_clear(rte_atomic16_t *v)

32-bit atomic operations:
static int rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
static uint32_t rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
static void rte_atomic32_init(rte_atomic32_t *v)
static int32_t rte_atomic32_read(const rte_atomic32_t *v)
static void rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
static void rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
static void rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
static void rte_atomic32_inc(rte_atomic32_t *v)
static void rte_atomic32_dec(rte_atomic32_t *v)
static int32_t rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
static int32_t rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
static int rte_atomic32_inc_and_test(rte_atomic32_t *v)
static int rte_atomic32_dec_and_test(rte_atomic32_t *v)
static int rte_atomic32_test_and_set(rte_atomic32_t *v)
static void rte_atomic32_clear(rte_atomic32_t *v)

64-bit atomic operations:
static int rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
static uint64_t rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
static void rte_atomic64_init(rte_atomic64_t *v)
static int64_t rte_atomic64_read(rte_atomic64_t *v)
static void rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
static void rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
static void rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
static void rte_atomic64_inc(rte_atomic64_t *v)
static void rte_atomic64_dec(rte_atomic64_t *v)
static int64_t rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
static int64_t rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
static int rte_atomic64_inc_and_test(rte_atomic64_t *v)
static int rte_atomic64_dec_and_test(rte_atomic64_t *v)
static int rte_atomic64_test_and_set(rte_atomic64_t *v)
static void rte_atomic64_clear(rte_atomic64_t *v)

128-bit atomic operations:
static int rte_atomic128_cmp_exchange(rte_int128_t *dst, rte_int128_t *exp, const rte_int128_t *src, unsigned int weak, int success, int failure)
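The index above also lists rte_atomic_thread_fence(), the C11-style standalone fence taking an rte_memory_order. A final hedged sketch pairing a release fence on the writer with an acquire fence on the reader; the flag/data variables and helper names are illustrative.

#include <stdint.h>
#include <rte_atomic.h>

static uint32_t data;
static volatile uint32_t flag;

static void writer(uint32_t v)
{
        data = v;
        /* Release fence: earlier stores become visible before the flag. */
        rte_atomic_thread_fence(rte_memory_order_release);
        flag = 1;
}

static int reader(uint32_t *out)
{
        if (flag == 0)
                return -1;
        /* Acquire fence: later loads cannot move before the flag load. */
        rte_atomic_thread_fence(rte_memory_order_acquire);
        *out = data;
        return 0;
}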