DPDK 22.11.7
rte_atomic.h
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

#include <stdint.h>
#include <rte_compat.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/* General memory barrier: loads and stores issued before the barrier
 * complete before loads and stores issued after it. */
static inline void rte_mb(void);

/* Write memory barrier: orders STORE operations across the barrier. */
static inline void rte_wmb(void);

/* Read memory barrier: orders LOAD operations across the barrier. */
static inline void rte_rmb(void);

/* SMP variants: order memory accesses between lcores. */
static inline void rte_smp_mb(void);

static inline void rte_smp_wmb(void);

static inline void rte_smp_rmb(void);

/* I/O variants: order accesses to I/O (device) memory. */
static inline void rte_io_mb(void);

static inline void rte_io_wmb(void);

static inline void rte_io_rmb(void);

#endif /* __DOXYGEN__ */

/* Compiler barrier: the compiler may not reorder loads/stores across it. */
#define rte_compiler_barrier() do {             \
        asm volatile ("" : : : "memory");       \
} while(0)

/* Synchronization fence between threads based on the specified memory order. */
static inline void rte_atomic_thread_fence(int memorder);

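As an illustration only (not part of the header), the following sketch shows one way a caller might pair rte_atomic_thread_fence() with a plain flag; the producer/consumer variables and function names are hypothetical, and rte_pause.h is assumed for the busy-wait.

/* Hypothetical publish/consume sketch using the thread fence. */
#include <rte_atomic.h>
#include <rte_pause.h>

static uint32_t payload;          /* data produced before the flag is set */
static volatile uint32_t ready;   /* flag polled by the consumer */

static void
producer_publish(uint32_t value)
{
        payload = value;
        /* Release fence: payload store becomes visible before the flag. */
        rte_atomic_thread_fence(__ATOMIC_RELEASE);
        ready = 1;
}

static uint32_t
consumer_read(void)
{
        while (ready == 0)
                rte_pause();      /* busy-wait until the flag is observed */
        /* Acquire fence: do not load payload before the flag is seen. */
        rte_atomic_thread_fence(__ATOMIC_ACQUIRE);
        return payload;
}
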
/*------------------------- 16 bit atomic operations -------------------------*/

/* Atomic compare-and-set: if *dst == exp, write src to *dst; non-zero on success. */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/* Atomic exchange: store val in *dst and return the previous value. */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
        return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

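A typical use of the cmpset primitive is a read/modify/retry loop. The sketch below is illustrative only and not part of the header; the function and its limit parameter are hypothetical.

/* Hypothetical bounded increment built on the 16-bit compare-and-set. */
static inline int
bounded_inc16(volatile uint16_t *counter, uint16_t limit)
{
        uint16_t old;

        do {
                old = *counter;
                if (old >= limit)
                        return 0;               /* already at the limit */
        } while (rte_atomic16_cmpset(counter, old, old + 1) == 0);

        return 1;                               /* increment applied atomically */
}
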
/* The atomic counter structure. */
typedef struct {
        volatile int16_t cnt;   /**< An internal counter value. */
} rte_atomic16_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC16_INIT(val) { (val) }

/* Initialize the counter to 0. */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
        v->cnt = 0;
}

/* Atomically read the counter value. */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
        return v->cnt;
}

/* Atomically set the counter to new_value. */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
        v->cnt = new_value;
}

static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}

static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}

static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
        rte_atomic16_add(v, 1);
}
#endif

static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
        rte_atomic16_sub(v, 1);
}
#endif

/* Atomically add inc and return the new value. */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}

/* Atomically subtract dec and return the new value. */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}

static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
        return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
        return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
        return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
        v->cnt = 0;
}

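The counter type is typically embedded in a larger object; the reference-count sketch below is illustrative only (struct and function names are hypothetical), assuming this header is included.

/* Hypothetical reference counting with the 16-bit atomic counter. */
struct my_obj {
        rte_atomic16_t refcnt;
        /* ... object payload ... */
};

static void
my_obj_init(struct my_obj *obj)
{
        rte_atomic16_set(&obj->refcnt, 1);      /* creator holds one reference */
}

static void
my_obj_get(struct my_obj *obj)
{
        rte_atomic16_inc(&obj->refcnt);
}

static int
my_obj_put(struct my_obj *obj)
{
        /* Returns 1 when the last reference has been dropped. */
        return rte_atomic16_dec_and_test(&obj->refcnt);
}
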
/*------------------------- 32 bit atomic operations -------------------------*/

static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
        return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

/* The atomic counter structure. */
typedef struct {
        volatile int32_t cnt;   /**< An internal counter value. */
} rte_atomic32_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC32_INIT(val) { (val) }

static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
        v->cnt = 0;
}

static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
        return v->cnt;
}

static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
        v->cnt = new_value;
}

static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}

static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}

static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
        rte_atomic32_add(v, 1);
}
#endif

static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
        rte_atomic32_sub(v, 1);
}
#endif

static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}

static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}

static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
        return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
        return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
        return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
        v->cnt = 0;
}

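test_and_set() and clear() can act as a simple ownership flag, as in the illustrative sketch below (not part of the header, names hypothetical). Note that clear() is a plain store without a barrier, so for real mutual exclusion DPDK's rte_spinlock_t is the appropriate primitive.

/* Hypothetical "single owner" flag using the 32-bit test-and-set. */
static rte_atomic32_t busy = RTE_ATOMIC32_INIT(0);

/* Returns 1 if this caller took ownership, 0 if someone else holds it. */
static int
try_enter(void)
{
        return rte_atomic32_test_and_set(&busy);
}

static void
leave(void)
{
        rte_atomic32_clear(&busy);
}
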
/*------------------------- 64 bit atomic operations -------------------------*/

static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
        return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

/* The atomic counter structure. */
typedef struct {
        volatile int64_t cnt;   /**< An internal counter value. */
} rte_atomic64_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC64_INIT(val) { (val) }

static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
        v->cnt = 0;
#else
        /* On 32-bit targets, emulate the 64-bit store with a CAS loop. */
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, 0);
        }
#endif
}
#endif

static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
        return v->cnt;
#else
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                /* replace the value by itself */
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, tmp);
        }
        return tmp;
#endif
}
#endif

static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
        v->cnt = new_value;
#else
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, new_value);
        }
#endif
}
#endif

static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}
#endif

static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}
#endif

static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
        rte_atomic64_add(v, 1);
}
#endif

static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
        rte_atomic64_sub(v, 1);
}
#endif

static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_add_return(v, 1) == 0;
}
#endif

static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
        return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
        rte_atomic64_set(v, 0);
}
#endif

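The 64-bit counters are commonly used for statistics updated from several lcores; the sketch below is illustrative only (struct and field names are hypothetical).

/* Hypothetical per-port statistics kept in 64-bit atomic counters. */
struct port_stats {
        rte_atomic64_t rx_packets;
        rte_atomic64_t rx_bytes;
};

static void
stats_update(struct port_stats *s, uint64_t pkts, uint64_t bytes)
{
        rte_atomic64_add(&s->rx_packets, (int64_t)pkts);
        rte_atomic64_add(&s->rx_bytes, (int64_t)bytes);
}

static int64_t
stats_packets(struct port_stats *s)
{
        return rte_atomic64_read(&s->rx_packets);
}
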
/*------------------------ 128 bit atomic operations -------------------------*/

/* 128-bit integer structure, 16-byte aligned. */
typedef struct {
        RTE_STD_C11
        union {
                uint64_t val[2];
#ifdef RTE_ARCH_64
                __extension__ __int128 int128;
#endif
        };
} __rte_aligned(16) rte_int128_t;

#ifdef __DOXYGEN__

/* Atomic 128-bit compare and exchange (experimental).
 * Returns non-zero on success; on failure, *exp is updated with the value of *dst. */
__rte_experimental
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
                           rte_int128_t *exp,
                           const rte_int128_t *src,
                           unsigned int weak,
                           int success,
                           int failure);

#endif /* __DOXYGEN__ */

#endif /* _RTE_ATOMIC_H_ */
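
For the experimental 128-bit compare-exchange (currently available on x86-64 and aarch64), a typical use is updating a {pointer, tag} pair in one shot. The sketch below is illustrative only; the structure layout and function name are hypothetical.

/* Hypothetical ABA-safe head update using the 128-bit compare-exchange. */
#include <stdint.h>
#include <rte_atomic.h>

static void
swap_tagged_head(rte_int128_t *head, void *new_ptr)
{
        rte_int128_t expected, desired;

        expected = *head;       /* snapshot; refreshed by cmp_exchange on failure */
        do {
                desired.val[0] = (uint64_t)(uintptr_t)new_ptr;
                desired.val[1] = expected.val[1] + 1;   /* bump the version tag */
        } while (rte_atomic128_cmp_exchange(head, &expected, &desired,
                        0 /* strong */, __ATOMIC_ACQ_REL, __ATOMIC_RELAXED) == 0);
}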