DPDK 21.11.9
rte_atomic.h
Go to the documentation of this file.
1/* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2010-2014 Intel Corporation
3 */
4
5#ifndef _RTE_ATOMIC_H_
6#define _RTE_ATOMIC_H_
7
15#include <stdint.h>
16#include <rte_common.h>
17
#ifdef __DOXYGEN__

/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 * This function is architecture dependent.
 */
static inline void rte_rmb(void);

/**
 * General memory barrier between lcores.
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_smp_mb() call are globally visible across the lcores
 * before the LOAD and STORE operations that follow it.
 */
static inline void rte_smp_mb(void);

/**
 * Write memory barrier between lcores.
 *
 * Guarantees that the STORE operations that precede the
 * rte_smp_wmb() call are globally visible across the lcores
 * before the STORE operations that follow it.
 */
static inline void rte_smp_wmb(void);

/**
 * Read memory barrier between lcores.
 *
 * Guarantees that the LOAD operations that precede the
 * rte_smp_rmb() call are globally visible across the lcores
 * before the LOAD operations that follow it.
 */
static inline void rte_smp_rmb(void);

/**
 * General memory barrier for I/O device and CPU.
 *
 * Guarantees that the LOAD and STORE operations that precede the
 * rte_io_mb() call are visible to I/O device or CPU before the
 * LOAD and STORE operations that follow it.
 */
static inline void rte_io_mb(void);

/**
 * Write memory barrier for I/O device and CPU.
 *
 * Guarantees that the STORE operations that precede the
 * rte_io_wmb() call are visible to I/O device before the
 * STORE operations that follow it.
 */
static inline void rte_io_wmb(void);

/**
 * Read memory barrier for IO device and CPU.
 *
 * Guarantees that the LOAD operations on I/O device that precede the
 * rte_io_rmb() call are visible to CPU before the LOAD operations
 * that follow it.
 */
static inline void rte_io_rmb(void);

#endif /* __DOXYGEN__ */
111
/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)

/**
 * Synchronization fence between threads based on the specified memory order.
 *
 * @param memorder
 *   The memory order to enforce (a __ATOMIC_* constant).
 */
static inline void rte_atomic_thread_fence(int memorder);
126
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from a counter and return the result.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 16-bit counter by one and test if it is 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test if it is 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 16-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}
405
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value for the counter.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from a counter and return the result.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 32-bit counter by one and test if it is 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test if it is 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}
684
/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * An atomic compare and set function used by the mutex functions.
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @param dst
 *   The destination into which the value will be written.
 * @param exp
 *   The expected value.
 * @param src
 *   The new value.
 * @return
 *   Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst
 *   *dst = val;
 *   return ret;
 *
 * @param dst
 *   The destination location into which the value will be written.
 * @param val
 *   The new value.
 * @return
 *   The original value at that location
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	/* On 32-bit targets a plain 64-bit store is not a single atomic
	 * access, so install the value with a compare-and-set loop. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/**
 * Atomically read a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	/* 32-bit target: read atomically via a CAS that replaces the
	 * value by itself. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param new_value
 *   The new value of the counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	/* 32-bit target: install the new value with a CAS loop. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/**
 * Add a 64-bit value to an atomic counter and return the result.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param inc
 *   The value to be added to the counter.
 * @return
 *   The value of v after the addition.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/**
 * Subtract a 64-bit value from an atomic counter and return the result.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @param dec
 *   The value to be subtracted from the counter.
 * @return
 *   The value of v after the subtraction.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one and test if it is 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the increment operation is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test if it is 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   True if the result after the decrement operation is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter value is already set, return 0 (failed). Otherwise, set
 * the counter value to 1 and return 1 (success).
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   0 if failed; else 1, success.
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif
1035
1036/*------------------------ 128 bit atomic operations -------------------------*/
1037
1042typedef struct {
1044 union {
1045 uint64_t val[2];
1046#ifdef RTE_ARCH_64
1047 __extension__ __int128 int128;
1048#endif
1049 };
1050} __rte_aligned(16) rte_int128_t;
1051
1052#ifdef __DOXYGEN__
1053
1093__rte_experimental
1094static inline int
1096 rte_int128_t *exp,
1097 const rte_int128_t *src,
1098 unsigned int weak,
1099 int success,
1100 int failure);
1101
1102#endif /* __DOXYGEN__ */
1103
1104#endif /* _RTE_ATOMIC_H_ */
static int rte_atomic16_dec_and_test(rte_atomic16_t *v)
static void rte_atomic16_dec(rte_atomic16_t *v)
static int rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
static int rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
static void rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
static int rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
static void rte_atomic_thread_fence(int memorder)
static int rte_atomic64_test_and_set(rte_atomic64_t *v)
static void rte_io_rmb(void)
static void rte_rmb(void)
static void rte_atomic32_clear(rte_atomic32_t *v)
Definition: rte_atomic.h:680
static int64_t rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
static void rte_io_mb(void)
static void rte_io_wmb(void)
static int rte_atomic32_inc_and_test(rte_atomic32_t *v)
static int rte_atomic64_dec_and_test(rte_atomic64_t *v)
static void rte_atomic64_clear(rte_atomic64_t *v)
static void rte_smp_mb(void)
static int16_t rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
Definition: rte_atomic.h:330
static void rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
Definition: rte_atomic.h:229
static void rte_atomic16_clear(rte_atomic16_t *v)
Definition: rte_atomic.h:401
static void rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
Definition: rte_atomic.h:243
static uint32_t rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
static void rte_mb(void)
static void rte_atomic32_inc(rte_atomic32_t *v)
static void rte_smp_wmb(void)
static void rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
Definition: rte_atomic.h:257
static int rte_atomic32_test_and_set(rte_atomic32_t *v)
static void rte_atomic32_dec(rte_atomic32_t *v)
static uint16_t rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
static uint64_t rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
static void rte_atomic64_dec(rte_atomic64_t *v)
static int32_t rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
Definition: rte_atomic.h:609
static void rte_atomic16_init(rte_atomic16_t *v)
Definition: rte_atomic.h:201
static void rte_smp_rmb(void)
static void rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
static __rte_experimental int rte_atomic128_cmp_exchange(rte_int128_t *dst, rte_int128_t *exp, const rte_int128_t *src, unsigned int weak, int success, int failure)
static int rte_atomic16_test_and_set(rte_atomic16_t *v)
static int32_t rte_atomic32_read(const rte_atomic32_t *v)
Definition: rte_atomic.h:494
static void rte_wmb(void)
static void rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
Definition: rte_atomic.h:522
static void rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
Definition: rte_atomic.h:508
static void rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
static void rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
Definition: rte_atomic.h:536
static void rte_atomic64_inc(rte_atomic64_t *v)
static int16_t rte_atomic16_read(const rte_atomic16_t *v)
Definition: rte_atomic.h:215
static int64_t rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
static void rte_atomic64_init(rte_atomic64_t *v)
static int16_t rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
Definition: rte_atomic.h:310
static void rte_atomic32_init(rte_atomic32_t *v)
Definition: rte_atomic.h:480
static void rte_atomic16_inc(rte_atomic16_t *v)
static int64_t rte_atomic64_read(rte_atomic64_t *v)
static int rte_atomic64_inc_and_test(rte_atomic64_t *v)
static int rte_atomic32_dec_and_test(rte_atomic32_t *v)
static int rte_atomic16_inc_and_test(rte_atomic16_t *v)
static int32_t rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
Definition: rte_atomic.h:589
#define RTE_STD_C11
Definition: rte_common.h:42
__extension__ struct rte_eth_link __rte_aligned(8)
volatile int16_t cnt
Definition: rte_atomic.h:186
volatile int32_t cnt
Definition: rte_atomic.h:465
volatile int64_t cnt
Definition: rte_atomic.h:743