#include <rte_compat.h>
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)
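/*
 * Illustrative sketch, not part of the header: rte_compiler_barrier() only
 * constrains the compiler, not the CPU, so it is enough when ordering matters
 * solely against the optimizer. The mbox_* names below are hypothetical.
 */
static uint32_t mbox_data;
static volatile uint32_t mbox_ready;

static inline void
mbox_post(uint32_t value)
{
	mbox_data = value;
	/* keep the compiler from sinking the data store below the flag store */
	rte_compiler_barrier();
	mbox_ready = 1;
}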
#ifdef RTE_FORCE_INTRINSICS
	/* rte_atomic16_cmpset() body when built with compiler intrinsics */
	return __sync_bool_compare_and_swap(dst, exp, src);
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);
#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#define RTE_ATOMIC16_INIT(val) { (val) }
	/* rte_atomic16_add() */
	__sync_fetch_and_add(&v->cnt, inc);

	/* rte_atomic16_sub() */
	__sync_fetch_and_sub(&v->cnt, dec);
#ifdef RTE_FORCE_INTRINSICS	/* rte_atomic16_inc() */
#ifdef RTE_FORCE_INTRINSICS	/* rte_atomic16_dec() */
	/* rte_atomic16_add_return() */
	return __sync_add_and_fetch(&v->cnt, inc);

	/* rte_atomic16_sub_return() */
	return __sync_sub_and_fetch(&v->cnt, dec);
#ifdef RTE_FORCE_INTRINSICS
	/* rte_atomic16_inc_and_test() */
	return __sync_add_and_fetch(&v->cnt, 1) == 0;

#ifdef RTE_FORCE_INTRINSICS
	/* rte_atomic16_dec_and_test() */
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
#ifdef RTE_FORCE_INTRINSICS	/* rte_atomic16_test_and_set() */
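/*
 * Usage sketch (hypothetical refcount, not taken from the header); every call
 * used here appears in the function list later in this file.
 */
#include <rte_atomic.h>

static rte_atomic16_t refcnt = RTE_ATOMIC16_INIT(1);

static void
obj_get(void)
{
	rte_atomic16_inc(&refcnt);
}

static int
obj_put(void)
{
	/* returns 1 when the last reference has been dropped */
	return rte_atomic16_dec_and_test(&refcnt);
}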
#ifdef RTE_FORCE_INTRINSICS
	/* rte_atomic32_cmpset() body when built with compiler intrinsics */
	return __sync_bool_compare_and_swap(dst, exp, src);
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);
#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#define RTE_ATOMIC32_INIT(val) { (val) }
	/* rte_atomic32_add() */
	__sync_fetch_and_add(&v->cnt, inc);

	/* rte_atomic32_sub() */
	__sync_fetch_and_sub(&v->cnt, dec);
#ifdef RTE_FORCE_INTRINSICS	/* rte_atomic32_inc() */
#ifdef RTE_FORCE_INTRINSICS	/* rte_atomic32_dec() */
	/* rte_atomic32_add_return() */
	return __sync_add_and_fetch(&v->cnt, inc);

	/* rte_atomic32_sub_return() */
	return __sync_sub_and_fetch(&v->cnt, dec);
#ifdef RTE_FORCE_INTRINSICS
	/* rte_atomic32_inc_and_test() */
	return __sync_add_and_fetch(&v->cnt, 1) == 0;

#ifdef RTE_FORCE_INTRINSICS
	/* rte_atomic32_dec_and_test() */
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
#ifdef RTE_FORCE_INTRINSICS	/* rte_atomic32_test_and_set() */
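/*
 * Sketch of the compare-and-set retry pattern that rte_atomic32_cmpset()
 * enables (hypothetical helper, not part of the header): recompute and retry
 * until no concurrent writer has raced the update. Assumes inc >= 0.
 */
static inline void
counter32_saturating_add(rte_atomic32_t *v, int32_t inc, int32_t max)
{
	int32_t old, new;

	do {
		old = rte_atomic32_read(v);
		new = (old > max - inc) ? max : old + inc;
	} while (rte_atomic32_cmpset((volatile uint32_t *)&v->cnt,
				     (uint32_t)old, (uint32_t)new) == 0);
}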
#ifdef RTE_FORCE_INTRINSICS
	/* rte_atomic64_cmpset() body when built with compiler intrinsics */
	return __sync_bool_compare_and_swap(dst, exp, src);
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);
#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#define RTE_ATOMIC64_INIT(val) { (val) }
#ifdef RTE_FORCE_INTRINSICS
	while (success == 0) {	/* rte_atomic64_init(): CAS fallback on 32-bit targets */

#ifdef RTE_FORCE_INTRINSICS
	while (success == 0) {	/* rte_atomic64_read(): CAS fallback on 32-bit targets */

#ifdef RTE_FORCE_INTRINSICS
	while (success == 0) {	/* rte_atomic64_set(): CAS fallback on 32-bit targets */
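/*
 * The "while (success == 0)" fragments above come from the fallback paths
 * that emulate 64-bit init/read/set with a compare-and-set loop when the
 * target has no native 64-bit atomic. A sketch of that pattern, using the
 * cnt field and rte_atomic64_cmpset() seen elsewhere in this file
 * (hypothetical helper name):
 */
static inline void
atomic64_set_via_cas(rte_atomic64_t *v, int64_t new_value)
{
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = (uint64_t)v->cnt;	/* may tear; cmpset catches it */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, (uint64_t)new_value);
	}
}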
#ifdef RTE_FORCE_INTRINSICS
	/* rte_atomic64_add() */
	__sync_fetch_and_add(&v->cnt, inc);

#ifdef RTE_FORCE_INTRINSICS
	/* rte_atomic64_sub() */
	__sync_fetch_and_sub(&v->cnt, dec);
#ifdef RTE_FORCE_INTRINSICS	/* rte_atomic64_inc() */
#ifdef RTE_FORCE_INTRINSICS	/* rte_atomic64_dec() */
#ifdef RTE_FORCE_INTRINSICS
	/* rte_atomic64_add_return() */
	return __sync_add_and_fetch(&v->cnt, inc);

#ifdef RTE_FORCE_INTRINSICS
	/* rte_atomic64_sub_return() */
	return __sync_sub_and_fetch(&v->cnt, dec);
#ifdef RTE_FORCE_INTRINSICS	/* rte_atomic64_inc_and_test() */
#ifdef RTE_FORCE_INTRINSICS	/* rte_atomic64_dec_and_test() */
#ifdef RTE_FORCE_INTRINSICS	/* rte_atomic64_test_and_set() */
#ifdef RTE_FORCE_INTRINSICS	/* rte_atomic64_clear() */
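/*
 * Usage sketch (hypothetical helper): atomically fetch and reset a 64-bit
 * statistics counter with rte_atomic64_exchange(), listed later in this file.
 */
static inline uint64_t
stats_drain(rte_atomic64_t *v)
{
	/* returns the previous value and leaves the counter at zero */
	return rte_atomic64_exchange((volatile uint64_t *)&v->cnt, 0);
}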
	/* member of the rte_int128_t union */
	__extension__ __int128 int128;
	/* parameter of rte_atomic128_cmp_exchange() */
	const rte_int128_t *src,
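/*
 * Sketch of a 128-bit compare-exchange built on the experimental
 * rte_atomic128_cmp_exchange() prototype listed below. The tagged-pointer
 * layout and all names here are hypothetical, and the call is only available
 * on architectures that implement it.
 */
#include <rte_atomic.h>

struct tagged_head {
	rte_int128_t v;		/* val[0] = pointer bits, val[1] = ABA tag */
};

static inline void
tagged_head_store(struct tagged_head *h, uint64_t new_ptr)
{
	rte_int128_t exp, desired;

	exp = h->v;	/* snapshot; refreshed by the API on failure */
	do {
		desired.val[0] = new_ptr;
		desired.val[1] = exp.val[1] + 1;	/* bump the tag */
	} while (rte_atomic128_cmp_exchange(&h->v, &exp, &desired,
					    0 /* strong */,
					    __ATOMIC_ACQ_REL,
					    __ATOMIC_RELAXED) == 0);
}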
/* memory barriers and fences */
static void rte_atomic_thread_fence(int memorder)
static void rte_rmb(void)
static void rte_wmb(void)
static void rte_smp_mb(void)
static void rte_smp_rmb(void)
static void rte_smp_wmb(void)
static void rte_io_mb(void)
static void rte_io_rmb(void)
static void rte_io_wmb(void)

/* 16-bit atomic counters */
static void rte_atomic16_init(rte_atomic16_t *v)
static int16_t rte_atomic16_read(const rte_atomic16_t *v)
static void rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
static void rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
static void rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
static void rte_atomic16_inc(rte_atomic16_t *v)
static void rte_atomic16_dec(rte_atomic16_t *v)
static int16_t rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
static int16_t rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
static int rte_atomic16_inc_and_test(rte_atomic16_t *v)
static int rte_atomic16_dec_and_test(rte_atomic16_t *v)
static int rte_atomic16_test_and_set(rte_atomic16_t *v)
static void rte_atomic16_clear(rte_atomic16_t *v)
static int rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
static uint16_t rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)

/* 32-bit atomic counters */
static void rte_atomic32_init(rte_atomic32_t *v)
static int32_t rte_atomic32_read(const rte_atomic32_t *v)
static void rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
static void rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
static void rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
static void rte_atomic32_inc(rte_atomic32_t *v)
static void rte_atomic32_dec(rte_atomic32_t *v)
static int32_t rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
static int32_t rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
static int rte_atomic32_inc_and_test(rte_atomic32_t *v)
static int rte_atomic32_dec_and_test(rte_atomic32_t *v)
static int rte_atomic32_test_and_set(rte_atomic32_t *v)
static void rte_atomic32_clear(rte_atomic32_t *v)
static int rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
static uint32_t rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)

/* 64-bit atomic counters */
static void rte_atomic64_init(rte_atomic64_t *v)
static int64_t rte_atomic64_read(rte_atomic64_t *v)
static void rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
static void rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
static void rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
static void rte_atomic64_inc(rte_atomic64_t *v)
static void rte_atomic64_dec(rte_atomic64_t *v)
static int64_t rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
static int64_t rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
static int rte_atomic64_inc_and_test(rte_atomic64_t *v)
static int rte_atomic64_dec_and_test(rte_atomic64_t *v)
static int rte_atomic64_test_and_set(rte_atomic64_t *v)
static void rte_atomic64_clear(rte_atomic64_t *v)
static int rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
static uint64_t rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)

/* 128-bit compare-exchange (experimental) */
static __rte_experimental int rte_atomic128_cmp_exchange(rte_int128_t *dst, rte_int128_t *exp, const rte_int128_t *src, unsigned int weak, int success, int failure)
__extension__ struct rte_eth_link __rte_aligned(8)	/**< aligned for atomic64 read/write */
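/*
 * Sketch of the SMP barrier pairing listed above (producer/consumer over
 * shared memory; the payload/flag variables are hypothetical):
 */
#include <rte_atomic.h>

static uint32_t payload;
static volatile uint32_t flag;

static void
producer(uint32_t value)
{
	payload = value;
	rte_smp_wmb();		/* publish the payload before the flag */
	flag = 1;
}

static uint32_t
consumer(void)
{
	while (flag == 0)
		;
	rte_smp_rmb();		/* read the payload no earlier than the flag */
	return payload;
}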