#include <rte_stdatomic.h>

/* General, write-side, and read-side memory barriers. */
static inline void rte_mb(void);
static inline void rte_wmb(void);
static inline void rte_rmb(void);

/* Compiler barrier: forbids compiler reordering but emits no fence. */
#ifdef RTE_TOOLCHAIN_MSVC
#define rte_compiler_barrier() _ReadWriteBarrier()
#else
#define rte_compiler_barrier() do { \
	asm volatile ("" : : : "memory"); \
} while (0)
#endif

#ifndef RTE_TOOLCHAIN_MSVC
static inline void rte_atomic_thread_fence(rte_memory_order memorder);
#endif

/* 16-bit atomic compare-and-set: write src to *dst only if *dst == exp;
 * returns non-zero on success. With RTE_FORCE_INTRINSICS it maps to the
 * GCC builtin. */
#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
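As a usage illustration (not part of the header), a minimal sketch of claiming a shared flag with rte_atomic16_cmpset(); the flag variable and helper name are hypothetical.

#include <stdint.h>
#include <rte_atomic.h>

static volatile uint16_t flag;	/* hypothetical shared flag, 0 = free */

/* Returns non-zero only for the caller that atomically moves the
 * flag from 0 to 1; concurrent callers that lose the race get 0. */
static int
try_claim(void)
{
	return rte_atomic16_cmpset(&flag, 0, 1);
}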
/* 16-bit atomic exchange: store val in *dst and return the old value. */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
}
#endif

/* Static initializer for an rte_atomic16_t counter. */
#define RTE_ATOMIC16_INIT(val) { (val) }

static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v);

/* Atomically add or subtract a value. */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
		rte_memory_order_seq_cst);
}

static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
		rte_memory_order_seq_cst);
}
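A minimal usage sketch for the counter API above; the counter and helper names are illustrative, not part of DPDK.

#include <rte_atomic.h>

static rte_atomic16_t nb_workers = RTE_ATOMIC16_INIT(0);

static void
worker_start(void)
{
	rte_atomic16_add(&nb_workers, 1);	/* one more active worker */
}

static void
worker_stop(void)
{
	rte_atomic16_sub(&nb_workers, 1);	/* one fewer */
}

rte_atomic16_read(&nb_workers) then returns the current count.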
/* Increment and decrement are implemented in terms of
 * rte_atomic16_add()/rte_atomic16_sub() with a delta of 1. */
#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic16_inc(rte_atomic16_t *v);
static inline void rte_atomic16_dec(rte_atomic16_t *v);
#endif

/* Value-returning variants: the counter value after the operation. */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
		rte_memory_order_seq_cst) + inc;
}

static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
		rte_memory_order_seq_cst) - dec;
}

/* Increment/decrement and report whether the result is zero. */
#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) + 1 == 0;
}

static inline int
rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) - 1 == 0;
}
#endif
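A common use of dec_and_test() is reference counting; a sketch with a hypothetical refcounted object type.

#include <stdlib.h>
#include <rte_atomic.h>

struct obj {
	rte_atomic16_t refcnt;	/* hypothetical refcounted object */
};

static void
obj_put(struct obj *o)
{
	/* dec_and_test() returns 1 only for the caller that drops the
	 * last reference, so exactly one thread frees the object. */
	if (rte_atomic16_dec_and_test(&o->refcnt))
		free(o);
}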
/* 16-bit test-and-set, then the start of the 32-bit family, which
 * mirrors the 16-bit one. */
#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
}
#endif

#define RTE_ATOMIC32_INIT(val) { (val) }

static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v);

static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
		rte_memory_order_seq_cst);
}

static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
		rte_memory_order_seq_cst);
}
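A sketch of exchange() used to snapshot and reset a statistic in one atomic step; the statistic variable is hypothetical.

#include <stdint.h>
#include <rte_atomic.h>

static volatile uint32_t error_count;	/* hypothetical stat */

/* Atomically read and clear the statistic: the old value is returned
 * and no concurrent update can be lost between the read and the reset. */
static uint32_t
errors_snapshot_and_reset(void)
{
	return rte_atomic32_exchange(&error_count, 0);
}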
/* 32-bit increment/decrement (rte_atomic32_add()/rte_atomic32_sub()
 * with a delta of 1) and the value-returning variants. */
#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic32_inc(rte_atomic32_t *v);
static inline void rte_atomic32_dec(rte_atomic32_t *v);
#endif

static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
		rte_memory_order_seq_cst) + inc;
}

static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
		rte_memory_order_seq_cst) - dec;
}

/* Increment/decrement and report whether the result is zero. */
#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) + 1 == 0;
}

static inline int
rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) - 1 == 0;
}
#endif
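A sketch of add_return() handing out distinct sequence numbers; the counter and helper names are illustrative.

#include <stdint.h>
#include <rte_atomic.h>

static rte_atomic32_t next_seq = RTE_ATOMIC32_INIT(0);	/* hypothetical */

/* Each caller gets a distinct, monotonically increasing number:
 * add_return() yields the counter value after the addition. */
static int32_t
alloc_seq(void)
{
	return rte_atomic32_add_return(&next_seq, 1);
}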
/* 32-bit test-and-set, then the 64-bit family. */
#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
}
#endif

#define RTE_ATOMIC64_INIT(val) { (val) }

/* On 32-bit targets a 64-bit word cannot be loaded or stored in one
 * instruction, so the generic init/read/set implementations retry a
 * compare-and-set loop (`while (success == 0) { ... }`) until the
 * whole word has been updated atomically. */
#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_init(rte_atomic64_t *v);
static inline int64_t rte_atomic64_read(rte_atomic64_t *v);
static inline void rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);
#endif

static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
		rte_memory_order_seq_cst);
}

static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
		rte_memory_order_seq_cst);
}
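A sketch of a 64-bit byte counter updated from the datapath; the counter and helper names are illustrative.

#include <stdint.h>
#include <rte_atomic.h>

static rte_atomic64_t rx_bytes = RTE_ATOMIC64_INIT(0);	/* hypothetical */

static void
account_packet(uint32_t pkt_len)
{
	/* Safe from any thread; on 32-bit targets the implementation
	 * may fall back to a compare-and-set loop as described above. */
	rte_atomic64_add(&rx_bytes, pkt_len);
}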
/* 64-bit increment/decrement and the value-returning variants. */
#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_inc(rte_atomic64_t *v);
static inline void rte_atomic64_dec(rte_atomic64_t *v);

static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
		rte_memory_order_seq_cst) + inc;
}

static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
		rte_memory_order_seq_cst) - dec;
}

/* inc_and_test, dec_and_test, test_and_set and clear follow the same
 * pattern as their 16- and 32-bit counterparts. */
#endif

/* 128-bit integer type; the __int128 member is unavailable with MSVC. */
typedef struct {
	union {
		uint64_t val[2];
#ifndef RTE_TOOLCHAIN_MSVC
		__extension__ __int128 int128;
#endif
	};
} __rte_aligned(16) rte_int128_t;

static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);
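A sketch of a strong 128-bit compare-exchange updating a 16-byte record; the helper name is hypothetical and the memory orders passed are illustrative.

#include <stdint.h>
#include <rte_atomic.h>

/* Atomically replace *dst with {lo, hi} only if it still equals *exp.
 * Returns non-zero on success; on failure *exp is updated with the
 * current contents of *dst, ready for a retry. */
static int
update_pair(rte_int128_t *dst, rte_int128_t *exp, uint64_t lo, uint64_t hi)
{
	rte_int128_t want;

	want.val[0] = lo;
	want.val[1] = hi;
	return rte_atomic128_cmp_exchange(dst, exp, &want, 0 /* strong */,
			rte_memory_order_seq_cst, rte_memory_order_seq_cst);
}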
Function synopsis, grouped by family:

Memory barriers and fences:
static void rte_mb(void)
static void rte_wmb(void)
static void rte_rmb(void)
static void rte_smp_mb(void)
static void rte_smp_wmb(void)
static void rte_smp_rmb(void)
static void rte_io_mb(void)
static void rte_io_wmb(void)
static void rte_io_rmb(void)
static void rte_atomic_thread_fence(rte_memory_order memorder)

16-bit atomic operations:
static int rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
static uint16_t rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
static void rte_atomic16_init(rte_atomic16_t *v)
static int16_t rte_atomic16_read(const rte_atomic16_t *v)
static void rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
static void rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
static void rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
static void rte_atomic16_inc(rte_atomic16_t *v)
static void rte_atomic16_dec(rte_atomic16_t *v)
static int16_t rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
static int16_t rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
static int rte_atomic16_inc_and_test(rte_atomic16_t *v)
static int rte_atomic16_dec_and_test(rte_atomic16_t *v)
static int rte_atomic16_test_and_set(rte_atomic16_t *v)
static void rte_atomic16_clear(rte_atomic16_t *v)

32-bit atomic operations:
static int rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
static uint32_t rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
static void rte_atomic32_init(rte_atomic32_t *v)
static int32_t rte_atomic32_read(const rte_atomic32_t *v)
static void rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
static void rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
static void rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
static void rte_atomic32_inc(rte_atomic32_t *v)
static void rte_atomic32_dec(rte_atomic32_t *v)
static int32_t rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
static int32_t rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
static int rte_atomic32_inc_and_test(rte_atomic32_t *v)
static int rte_atomic32_dec_and_test(rte_atomic32_t *v)
static int rte_atomic32_test_and_set(rte_atomic32_t *v)
static void rte_atomic32_clear(rte_atomic32_t *v)

64-bit atomic operations:
static int rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
static uint64_t rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
static void rte_atomic64_init(rte_atomic64_t *v)
static int64_t rte_atomic64_read(rte_atomic64_t *v)
static void rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
static void rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
static void rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
static void rte_atomic64_inc(rte_atomic64_t *v)
static void rte_atomic64_dec(rte_atomic64_t *v)
static int64_t rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
static int64_t rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
static int rte_atomic64_inc_and_test(rte_atomic64_t *v)
static int rte_atomic64_dec_and_test(rte_atomic64_t *v)
static int rte_atomic64_test_and_set(rte_atomic64_t *v)
static void rte_atomic64_clear(rte_atomic64_t *v)

128-bit atomic operations:
static int rte_atomic128_cmp_exchange(rte_int128_t *dst, rte_int128_t *exp, const rte_int128_t *src, unsigned int weak, int success, int failure)
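Finally, a sketch of one-shot initialization built on test_and_set(); the flag and setup helper are hypothetical.

#include <rte_atomic.h>

static rte_atomic32_t initialized = RTE_ATOMIC32_INIT(0);	/* hypothetical */

static void
do_one_time_setup(void)
{
	/* hypothetical one-time initialization work */
}

static void
init_once(void)
{
	/* test_and_set() flips the counter from 0 to 1 for exactly one
	 * caller and returns 1 to it; every other caller gets 0. */
	if (rte_atomic32_test_and_set(&initialized))
		do_one_time_setup();
}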