DPDK 20.05.0
rte_atomic.h

/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

#include <stdint.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/* General memory barrier: all prior loads and stores are globally
 * visible before any later loads and stores. */
static inline void rte_mb(void);

/* Write memory barrier: all prior stores are globally visible before
 * any later stores. */
static inline void rte_wmb(void);

/* Read memory barrier: all prior loads complete before any later
 * loads. */
static inline void rte_rmb(void);

/* SMP barriers: order memory accesses as seen by other lcores. */
static inline void rte_smp_mb(void);

static inline void rte_smp_wmb(void);

static inline void rte_smp_rmb(void);

/* I/O barriers: order accesses to I/O memory relative to each other
 * and to normal memory. */
static inline void rte_io_mb(void);

static inline void rte_io_wmb(void);

static inline void rte_io_rmb(void);

/* Coherent I/O barriers: order accesses to memory shared coherently
 * with a device, e.g. DMA descriptor rings. */
static inline void rte_cio_wmb(void);

static inline void rte_cio_rmb(void);

#endif /* __DOXYGEN__ */

/* Compiler barrier only: constrains compiler reordering but emits no
 * fence instruction. */
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)
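
/*
 * Usage sketch (illustrative, not part of the header): the barrier
 * keeps the compiler from reordering the two stores in this
 * hypothetical publish() helper. It constrains only the compiler;
 * publishing across lcores on a weakly ordered CPU needs
 * rte_smp_wmb() instead.
 */
static int example_data;
static volatile int example_ready;

static void publish(int value)
{
	example_data = value;
	rte_compiler_barrier();	/* compile-time ordering only */
	example_ready = 1;
}
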
/*------------------------- 16 bit atomic operations -------------------------*/

/* Atomic compare-and-set: if *dst equals exp, write src to *dst.
 * Returns non-zero on success, zero otherwise. */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
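
/*
 * Usage sketch (illustrative, not part of the header): a lock-free
 * "store maximum" built on rte_atomic16_cmpset(). The CAS loop
 * retries until it installs the candidate or sees a stored value
 * that is already at least as large.
 */
static inline void
store_max16(volatile uint16_t *dst, uint16_t candidate)
{
	uint16_t cur = *dst;

	while (cur < candidate &&
			!rte_atomic16_cmpset(dst, cur, candidate))
		cur = *dst;	/* lost a race: reload and retry */
}
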
/* Atomically exchange *dst with val; returns the previous value. */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/* The atomic counter structure (16-bit). */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC16_INIT(val) { (val) }

/* Initialize the counter to zero. */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/* Atomically read the counter value. */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/* Atomically set the counter to new_value. */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/* Atomically add inc to the counter. */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/* Atomically subtract dec from the counter. */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/* Atomically increment the counter by one. */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/* Atomically decrement the counter by one. */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/* Atomically add inc and return the new value. */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/* Atomically subtract dec and return the new value. */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/* Increment by one and return true if the result is zero. */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/* Decrement by one and return true if the result is zero. */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/* Set the counter to one if it was zero; return true on success. */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/* Set the counter to zero. */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}
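
/*
 * Usage sketch (illustrative, not part of the header): a one-shot
 * guard built on rte_atomic16_test_and_set(). Exactly one caller
 * wins the flag and runs the initialization; do_global_init() is a
 * hypothetical helper.
 */
static rte_atomic16_t init_once = RTE_ATOMIC16_INIT(0);

static void init_if_first(void)
{
	if (rte_atomic16_test_and_set(&init_once))
		do_global_init();	/* hypothetical; runs exactly once */
}
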
/*------------------------- 32 bit atomic operations -------------------------*/

/* Atomic compare-and-set: if *dst equals exp, write src to *dst.
 * Returns non-zero on success, zero otherwise. */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/* Atomically exchange *dst with val; returns the previous value. */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif
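
/*
 * Usage sketch (illustrative, not part of the header): draining a
 * bitmask of pending events with rte_atomic32_exchange(). The
 * consumer atomically takes every bit published so far and leaves
 * zero behind, so no event is lost or seen twice.
 */
static volatile uint32_t pending_events;

static uint32_t drain_events(void)
{
	return rte_atomic32_exchange(&pending_events, 0);
}
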
/* The atomic counter structure (32-bit). */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC32_INIT(val) { (val) }

/* Initialize the counter to zero. */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/* Atomically read the counter value. */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/* Atomically set the counter to new_value. */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/* Atomically add inc to the counter. */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/* Atomically subtract dec from the counter. */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/* Atomically increment the counter by one. */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/* Atomically decrement the counter by one. */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/* Atomically add inc and return the new value. */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/* Atomically subtract dec and return the new value. */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/* Increment by one and return true if the result is zero. */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/* Decrement by one and return true if the result is zero. */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/* Set the counter to one if it was zero; return true on success. */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/* Set the counter to zero. */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}
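
/*
 * Usage sketch (illustrative, not part of the header): reference
 * counting with the rte_atomic32_t counter API. The caller that
 * drops the count to zero frees the object; refobj and obj_free()
 * are hypothetical.
 */
struct refobj {
	rte_atomic32_t refcnt;
	/* ... payload ... */
};

static void refobj_get(struct refobj *o)
{
	rte_atomic32_inc(&o->refcnt);
}

static void refobj_put(struct refobj *o)
{
	if (rte_atomic32_dec_and_test(&o->refcnt))
		obj_free(o);	/* hypothetical destructor */
}
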
/*------------------------- 64 bit atomic operations -------------------------*/

/* Atomic compare-and-set: if *dst equals exp, write src to *dst.
 * Returns non-zero on success, zero otherwise. */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/* Atomically exchange *dst with val; returns the previous value. */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/* The atomic counter structure (64-bit). */
typedef struct {
	volatile int64_t cnt; /**< An internal counter value. */
} rte_atomic64_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC64_INIT(val) { (val) }

/* Initialize the counter to zero. On 32-bit targets a plain 64-bit
 * store is not single-copy atomic, so a cmpset loop is used. */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/* Atomically read the counter value. The cmpset loop below keeps the
 * 64-bit read from tearing on 32-bit targets. */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/* Atomically set the counter to new_value. */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

/* Atomically add inc to the counter. */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/* Atomically subtract dec from the counter. */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/* Atomically increment the counter by one. */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/* Atomically decrement the counter by one. */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/* Atomically add inc and return the new value. */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/* Atomically subtract dec and return the new value. */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/* Increment by one and return true if the result is zero. */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/* Decrement by one and return true if the result is zero. */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/* Set the counter to one if it was zero; return true on success. */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/* Set the counter to zero. */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif
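
/*
 * Usage sketch (illustrative, not part of the header): a shared byte
 * counter in an rte_atomic64_t. Going through rte_atomic64_read()
 * matters on 32-bit targets, where a plain 64-bit load could tear
 * between the two halves of the value.
 */
static rte_atomic64_t rx_bytes = RTE_ATOMIC64_INIT(0);

static void account_rx(uint32_t pkt_len)
{
	rte_atomic64_add(&rx_bytes, pkt_len);
}

static int64_t rx_bytes_snapshot(void)
{
	return rte_atomic64_read(&rx_bytes);
}
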
/*------------------------ 128 bit atomic operations -------------------------*/

/* 128-bit integer structure, 16-byte aligned. */
typedef struct {
	RTE_STD_C11
	union {
		uint64_t val[2]; /**< The pair of 64-bit halves. */
#ifdef RTE_ARCH_64
		__extension__ __int128 int128; /**< Whole 128-bit value. */
#endif
	};
} __rte_aligned(16) rte_int128_t;

#ifdef __DOXYGEN__

/* Atomic 128-bit compare-and-exchange: if *dst equals *exp, write
 * *src to *dst; otherwise copy the current *dst back into *exp.
 * weak selects weak (may fail spuriously) or strong semantics;
 * success and failure take C11 memory-order values. Returns non-zero
 * on success, zero otherwise. */
__rte_experimental
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);

#endif /* __DOXYGEN__ */
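
/*
 * Usage sketch (illustrative, not part of the header): updating a
 * value together with a monotonically growing tag via
 * rte_atomic128_cmp_exchange() on 64-bit targets, a common way to
 * defeat ABA. On failure the call refreshes "old" with the current
 * contents, so the loop simply retries.
 */
#ifdef RTE_ARCH_64
static rte_int128_t tagged_slot;

static void tagged_store(uint64_t new_val)
{
	rte_int128_t old = tagged_slot;	/* initial snapshot */
	rte_int128_t want;

	do {
		want.val[0] = new_val;
		want.val[1] = old.val[1] + 1;	/* bump the ABA tag */
	} while (!rte_atomic128_cmp_exchange(&tagged_slot, &old, &want,
			0 /* strong */, __ATOMIC_ACQ_REL, __ATOMIC_RELAXED));
}
#endif
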
#endif /* _RTE_ATOMIC_H_ */