DPDK 20.08.0
rte_atomic.h
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

#include <stdint.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/* General memory barrier: all prior loads and stores are globally visible
 * before any subsequent load or store. */
static inline void rte_mb(void);

/* Write memory barrier: orders prior stores against subsequent stores. */
static inline void rte_wmb(void);

/* Read memory barrier: orders prior loads against subsequent loads. */
static inline void rte_rmb(void);

/* SMP barriers: order memory accesses as observed between lcores. */
static inline void rte_smp_mb(void);

static inline void rte_smp_wmb(void);

static inline void rte_smp_rmb(void);

/* I/O barriers: order accesses to I/O (device) memory. */
static inline void rte_io_mb(void);

static inline void rte_io_wmb(void);

static inline void rte_io_rmb(void);

/* Coherent I/O barriers: order accesses to coherent (DMA-visible) memory. */
static inline void rte_cio_wmb(void);

static inline void rte_cio_rmb(void);

#endif /* __DOXYGEN__ */

/* Compiler barrier: forbids the compiler from reordering accesses across
 * this point; generates no instruction. */
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)

/* C11-style thread fence; memory_order takes the __ATOMIC_* constants. */
static inline void rte_atomic_thread_fence(int memory_order);
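
/*
 * Usage sketch (illustrative, not part of this header): a minimal
 * producer/consumer handshake between two lcores. The producer fills a
 * payload, issues a write barrier, then publishes a flag; the consumer
 * polls the flag and issues a read barrier before reading the payload.
 * The names `payload` and `ready` are hypothetical.
 */
static uint32_t payload;
static volatile uint32_t ready;

static inline void producer(uint32_t value)
{
	payload = value;
	rte_smp_wmb();	/* payload store must be visible before the flag */
	ready = 1;
}

static inline uint32_t consumer(void)
{
	while (ready == 0)
		;
	rte_smp_rmb();	/* flag load must complete before reading payload */
	return payload;
}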

/*------------------------- 16 bit atomic operations -------------------------*/

/* Atomic compare and set: if *dst == exp, write src to *dst.
 * Returns non-zero on success, 0 on failure. */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
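
/*
 * Usage sketch (illustrative, not part of this header): since
 * rte_atomic16_cmpset() returns non-zero on success, a lock-free
 * read-modify-write is typically written as a retry loop. Here a
 * hypothetical 16-bit counter is saturated at UINT16_MAX.
 */
static inline void saturating_inc16(volatile uint16_t *ctr)
{
	uint16_t old;

	do {
		old = *ctr;
		if (old == UINT16_MAX)
			return;		/* already saturated, nothing to do */
	} while (rte_atomic16_cmpset(ctr, old, old + 1) == 0);
}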

/* Atomic exchange: store val into *dst and return the previous value. */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/* The atomic counter structure. */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC16_INIT(val) { (val) }

static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/* Add inc to the counter and return the new value atomically. */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/* Subtract dec from the counter and return the new value atomically. */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/* Increment; return true if the result is 0. */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/* Decrement; return true if the result is 0. */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/* Set the counter to 1 if it was 0; return true if it was set. */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/* Reset the counter to 0. */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}
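
/*
 * Usage sketch (illustrative, not part of this header):
 * rte_atomic16_test_and_set() returns non-zero only for the caller that
 * flips the counter from 0 to 1, so it can guard one-time initialization;
 * rte_atomic16_clear() re-arms the flag. `init_flag` and `init_once` are
 * hypothetical names.
 */
static rte_atomic16_t init_flag = RTE_ATOMIC16_INIT(0);

static inline void init_once(void (*do_init)(void))
{
	if (rte_atomic16_test_and_set(&init_flag))
		do_init();	/* exactly one caller wins the 0 -> 1 race */
}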

/*------------------------- 32 bit atomic operations -------------------------*/

/* Atomic compare and set: if *dst == exp, write src to *dst.
 * Returns non-zero on success, 0 on failure. */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/* Atomic exchange: store val into *dst and return the previous value. */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/* The atomic counter structure. */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC32_INIT(val) { (val) }

static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/* Add inc to the counter and return the new value atomically. */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/* Subtract dec from the counter and return the new value atomically. */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/* Increment; return true if the result is 0. */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/* Decrement; return true if the result is 0. */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/* Set the counter to 1 if it was 0; return true if it was set. */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/* Reset the counter to 0. */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}
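
/*
 * Usage sketch (illustrative, not part of this header): a reference count
 * built on the 32-bit counter. rte_atomic32_dec_and_test() returns true
 * only for the caller that drops the last reference, so the destructor runs
 * exactly once. `struct refobj` is a hypothetical type.
 */
struct refobj {
	rte_atomic32_t refcnt;
	/* ... payload ... */
};

static inline void refobj_get(struct refobj *o)
{
	rte_atomic32_inc(&o->refcnt);
}

static inline void refobj_put(struct refobj *o,
			      void (*destroy)(struct refobj *))
{
	if (rte_atomic32_dec_and_test(&o->refcnt))
		destroy(o);	/* last reference gone */
}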

/*------------------------- 64 bit atomic operations -------------------------*/

/* Atomic compare and set: if *dst == exp, write src to *dst.
 * Returns non-zero on success, 0 on failure. */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/* Atomic exchange: store val into *dst and return the previous value. */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/* The atomic counter structure. */
typedef struct {
	volatile int64_t cnt; /**< An internal counter value. */
} rte_atomic64_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC64_INIT(val) { (val) }

static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	/* On 32-bit targets a plain 64-bit store is not atomic, so install
	 * the value with a compare-and-set loop instead. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, 0);
	}
#endif
}
#endif

static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself to get an atomic snapshot */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, tmp);
	}
	return tmp;
#endif
}
#endif

static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, new_value);
	}
#endif
}
#endif

static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/* Add inc to the counter and return the new value atomically. */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/* Subtract dec from the counter and return the new value atomically. */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/* Increment; return true if the result is 0. */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/* Decrement; return true if the result is 0. */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/* Set the counter to 1 if it was 0; return true if it was set. */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/* Reset the counter to 0. */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif
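
/*
 * Usage sketch (illustrative, not part of this header): 64-bit counters
 * suit statistics that can overflow 32 bits. On 32-bit builds the read/set
 * paths above fall back to compare-and-set loops, so the counter stays
 * atomic there too. `rx_bytes` and the helpers are hypothetical names.
 */
static rte_atomic64_t rx_bytes = RTE_ATOMIC64_INIT(0);

static inline void account_rx(uint32_t pkt_len)
{
	rte_atomic64_add(&rx_bytes, pkt_len);
}

static inline int64_t rx_bytes_snapshot(void)
{
	return rte_atomic64_read(&rx_bytes);
}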

/*------------------------ 128 bit atomic operations -------------------------*/

/* 128-bit integer type, 16-byte aligned. */
typedef struct {
	RTE_STD_C11
	union {
		uint64_t val[2]; /**< The pair of 64-bit halves. */
#ifdef RTE_ARCH_64
		__extension__ __int128 int128; /**< The full 128-bit value. */
#endif
	};
} __rte_aligned(16) rte_int128_t;

#ifdef __DOXYGEN__

/* 128-bit atomic compare and exchange. On failure, *exp is updated with the
 * value observed in *dst. success and failure take __ATOMIC_* memory orders;
 * returns non-zero if the exchange succeeded. */
__rte_experimental
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);

#endif /* __DOXYGEN__ */
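
/*
 * Usage sketch (illustrative, not part of this header, and only meaningful
 * on 64-bit architectures where this API is implemented): like C11
 * compare-exchange, a failed call refreshes *exp with the observed value,
 * so the retry loop reloads for free. Shown for a hypothetical 128-bit
 * {pointer, tag} pair used to sidestep ABA problems.
 */
static inline void tagged_store(rte_int128_t *slot, uint64_t ptr)
{
	rte_int128_t exp = *slot;	/* racy first read; the CAS validates it */
	rte_int128_t val;

	do {
		val.val[0] = ptr;
		val.val[1] = exp.val[1] + 1;	/* bump the tag */
	} while (!rte_atomic128_cmp_exchange(slot, &exp, &val, 0,
					     __ATOMIC_SEQ_CST,
					     __ATOMIC_SEQ_CST));
}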

#endif /* _RTE_ATOMIC_H_ */