DPDK 24.03.0-rc4
rte_atomic.h
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

#include <stdint.h>

#include <rte_common.h>
#include <rte_stdatomic.h>

#ifdef __DOXYGEN__

/* General memory barrier: completes all prior loads and stores before any
 * load or store issued after it. */
static inline void rte_mb(void);

/* Write memory barrier: completes all prior stores before any later store. */
static inline void rte_wmb(void);

/* Read memory barrier: completes all prior loads before any later load. */
static inline void rte_rmb(void);

/* SMP barriers: order memory accesses between lcores; may be lighter than
 * the general barriers on architectures where that is sufficient. */
static inline void rte_smp_mb(void);

static inline void rte_smp_wmb(void);

static inline void rte_smp_rmb(void);

/* I/O barriers: order accesses to I/O (device) memory. */
static inline void rte_io_mb(void);

static inline void rte_io_wmb(void);

static inline void rte_io_rmb(void);

#endif /* __DOXYGEN__ */

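/*
 * Usage sketch (added for illustration, not part of the upstream header):
 * the producer side of a single-producer/single-consumer handoff issues
 * rte_smp_wmb() between filling a slot and publishing the new index, so the
 * consumer core never observes the index before the data. Names below are
 * hypothetical.
 */
static inline void
example_spsc_publish(volatile uint32_t *slot, uint32_t value,
        volatile uint32_t *tail)
{
        *slot = value;      /* fill the data slot */
        rte_smp_wmb();      /* order the fill before the index update */
        *tail = *tail + 1;  /* publish: the consumer polls *tail */
}
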
#ifdef RTE_TOOLCHAIN_MSVC
#define rte_compiler_barrier() _ReadWriteBarrier()
#else
#define rte_compiler_barrier() do {             \
        asm volatile ("" : : : "memory");       \
} while(0)
#endif

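/*
 * Usage sketch (added for illustration): rte_compiler_barrier() only stops
 * the compiler from reordering across it; it emits no fence instruction, so
 * it orders nothing between cores. It is enough when the other observer runs
 * on the same core, e.g. a signal handler. The function below is hypothetical.
 */
static inline void
example_same_core_publish(volatile int *data, volatile int *ready)
{
        *data = 42;
        rte_compiler_barrier(); /* keep the store to *data ahead of *ready */
        *ready = 1;
}
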
/* Fence with the semantics of the given rte_memory_order. */
static inline void rte_atomic_thread_fence(rte_memory_order memorder);

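/*
 * Sketch (illustrative): Dekker-style store-load ordering, with a seq-cst
 * fence between a relaxed store and a relaxed load. It assumes the
 * rte_atomic_store_explicit()/rte_atomic_load_explicit() helpers from
 * rte_stdatomic.h; variable names are hypothetical.
 */
static inline int
example_store_then_load(volatile __rte_atomic int *mine,
        volatile __rte_atomic int *theirs)
{
        rte_atomic_store_explicit(mine, 1, rte_memory_order_relaxed);
        rte_atomic_thread_fence(rte_memory_order_seq_cst);
        return rte_atomic_load_explicit(theirs, rte_memory_order_relaxed);
}
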
/*------------------------- 16 bit atomic operations -------------------------*/

#ifndef RTE_TOOLCHAIN_MSVC

/* Atomic compare-and-set: if *dst == exp, write src to *dst.
 * Returns non-zero on success, 0 otherwise. */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
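
/*
 * Usage sketch (illustrative, not part of the upstream header): cmpset() is
 * the building block for lock-free read-modify-write loops; retry until the
 * swap succeeds against an unchanged value. The helper name is hypothetical.
 */
static inline void
example_atomic16_or(volatile uint16_t *addr, uint16_t mask)
{
        uint16_t old;

        do {
                old = *addr;
        } while (rte_atomic16_cmpset(addr, old, old | mask) == 0);
}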

/* Atomically set *dst to val and return the previous value. */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
        return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
}
#endif

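/*
 * Sketch (illustrative): exchange() makes "take and reset" patterns a single
 * atomic step, e.g. draining a pending-work flag. The name is hypothetical.
 */
static inline uint16_t
example_atomic16_take(volatile uint16_t *pending)
{
        return rte_atomic16_exchange(pending, 0);
}
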
/* The atomic counter structure. */
typedef struct {
        volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC16_INIT(val) { (val) }

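/*
 * Sketch (illustrative): a counter can be initialized statically with the
 * RTE_ATOMIC16_INIT macro, or at run time with rte_atomic16_init() below.
 * The variable name is hypothetical.
 */
static rte_atomic16_t example_error_count = RTE_ATOMIC16_INIT(0);
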
/* Initialize an atomic counter to zero. */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
        v->cnt = 0;
}

/* Atomically read the counter. */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
        return v->cnt;
}

/* Atomically set the counter to new_value. */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
        v->cnt = new_value;
}

/* Atomically add inc to the counter. */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
        rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
            rte_memory_order_seq_cst);
}

/* Atomically subtract dec from the counter. */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
        rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
            rte_memory_order_seq_cst);
}

/* Atomically increment the counter by one. */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
        rte_atomic16_add(v, 1);
}
#endif

/* Atomically decrement the counter by one. */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
        rte_atomic16_sub(v, 1);
}
#endif

/* Atomically add inc and return the new value. */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
        return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
            rte_memory_order_seq_cst) + inc;
}

/* Atomically subtract dec and return the new value. */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
        return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
            rte_memory_order_seq_cst) - dec;
}

/* Atomically increment; return true (non-zero) if the result is 0. */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
        return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
            rte_memory_order_seq_cst) + 1 == 0;
}
#endif

/* Atomically decrement; return true (non-zero) if the result is 0. */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
        return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
            rte_memory_order_seq_cst) - 1 == 0;
}
#endif

/* If the counter is 0, set it to 1 and return true; otherwise return false. */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
        return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/* Set the counter to 0. */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
        v->cnt = 0;
}

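/*
 * Sketch (illustrative): test_and_set()/clear() can implement a crude
 * busy-wait flag; real code should prefer rte_spinlock_t, which also gives
 * proper release ordering. Names are hypothetical.
 */
static inline void
example_flag_lock(rte_atomic16_t *flag)
{
        /* test_and_set() returns non-zero once we move the flag 0 -> 1 */
        while (rte_atomic16_test_and_set(flag) == 0)
                ;
}

static inline void
example_flag_unlock(rte_atomic16_t *flag)
{
        rte_atomic16_clear(flag);
}
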
/*------------------------- 32 bit atomic operations -------------------------*/

/* Atomic compare-and-set: if *dst == exp, write src to *dst.
 * Returns non-zero on success, 0 otherwise. */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/* Atomically set *dst to val and return the previous value. */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
        return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
}
#endif

/* The atomic counter structure. */
typedef struct {
        volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC32_INIT(val) { (val) }

/* Initialize an atomic counter to zero. */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
        v->cnt = 0;
}

/* Atomically read the counter. */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
        return v->cnt;
}

/* Atomically set the counter to new_value. */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
        v->cnt = new_value;
}

/* Atomically add inc to the counter. */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
        rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
            rte_memory_order_seq_cst);
}

/* Atomically subtract dec from the counter. */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
        rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
            rte_memory_order_seq_cst);
}

/* Atomically increment the counter by one. */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
        rte_atomic32_add(v, 1);
}
#endif

/* Atomically decrement the counter by one. */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
        rte_atomic32_sub(v, 1);
}
#endif

/* Atomically add inc and return the new value. */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
        return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
            rte_memory_order_seq_cst) + inc;
}

/* Atomically subtract dec and return the new value. */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
        return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
            rte_memory_order_seq_cst) - dec;
}

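/*
 * Sketch (illustrative): because add_return() yields the post-increment
 * value, each caller gets a unique number, e.g. for tickets or sequence IDs.
 * The helper name is hypothetical.
 */
static inline int32_t
example_next_seq(rte_atomic32_t *seq)
{
        return rte_atomic32_add_return(seq, 1);
}
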
/* Atomically increment; return true (non-zero) if the result is 0. */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
        return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
            rte_memory_order_seq_cst) + 1 == 0;
}
#endif

/* Atomically decrement; return true (non-zero) if the result is 0. */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
        return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
            rte_memory_order_seq_cst) - 1 == 0;
}
#endif

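/*
 * Sketch (illustrative): dec_and_test() is the usual reference-count release
 * primitive; the caller that drops the count to zero disposes of the object.
 * The struct and names are hypothetical.
 */
struct example_refobj {
        rte_atomic32_t refcnt;
        /* ... payload ... */
};

static inline void
example_refobj_put(struct example_refobj *obj,
        void (*release)(struct example_refobj *))
{
        if (rte_atomic32_dec_and_test(&obj->refcnt))
                release(obj); /* last reference gone */
}
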
/* If the counter is 0, set it to 1 and return true; otherwise return false. */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
        return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/* Set the counter to 0. */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
        v->cnt = 0;
}

/*------------------------- 64 bit atomic operations -------------------------*/

/* Atomic compare-and-set: if *dst == exp, write src to *dst.
 * Returns non-zero on success, 0 otherwise. */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/* Atomically set *dst to val and return the previous value. */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
        return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
}
#endif

/* The atomic counter structure. */
typedef struct {
        volatile int64_t cnt; /**< An internal counter value. */
} rte_atomic64_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC64_INIT(val) { (val) }

/* Initialize an atomic counter to zero. */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
        v->cnt = 0;
#else
        /* on 32-bit targets, emulate the 64-bit store with a cmpset loop */
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                tmp, 0);
        }
#endif
}
#endif

/* Atomically read the counter. */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
        return v->cnt;
#else
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                /* replace the value by itself to get an atomic snapshot */
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                tmp, tmp);
        }
        return tmp;
#endif
}
#endif

/* Atomically set the counter to new_value. */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
        v->cnt = new_value;
#else
        /* on 32-bit targets, emulate the 64-bit store with a cmpset loop */
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                tmp, new_value);
        }
#endif
}
#endif

/* Atomically add inc to the counter. */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
        rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
            rte_memory_order_seq_cst);
}
#endif

/* Atomically subtract dec from the counter. */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
        rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
            rte_memory_order_seq_cst);
}
#endif

/* Atomically increment the counter by one. */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
        rte_atomic64_add(v, 1);
}
#endif

/* Atomically decrement the counter by one. */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
        rte_atomic64_sub(v, 1);
}
#endif

/* Atomically add inc and return the new value. */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
        return rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
            rte_memory_order_seq_cst) + inc;
}
#endif

/* Atomically subtract dec and return the new value. */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
        return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
            rte_memory_order_seq_cst) - dec;
}
#endif

/* Atomically increment; return true (non-zero) if the result is 0. */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/* Atomically decrement; return true (non-zero) if the result is 0. */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/* If the counter is 0, set it to 1 and return true; otherwise return false. */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
        return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/* Set the counter to 0. */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
        rte_atomic64_set(v, 0);
}
#endif

#endif /* !RTE_TOOLCHAIN_MSVC */

/*------------------------ 128 bit atomic operations -------------------------*/

typedef struct __rte_aligned(16) {
        union {
                uint64_t val[2];
#ifdef RTE_ARCH_64
#ifndef RTE_TOOLCHAIN_MSVC
                __extension__ __int128 int128;
#endif
#endif
        };
} rte_int128_t;

#ifdef __DOXYGEN__

/* Atomic 128-bit compare-and-exchange: compares *dst with *exp; on match,
 * *src is written to *dst, otherwise the current value of *dst is loaded
 * into *exp. weak allows spurious failure; success and failure give the
 * memory ordering for the two outcomes. Returns non-zero on success.
 * Provided by the architecture-specific headers on 64-bit platforms. */
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
                           rte_int128_t *exp,
                           const rte_int128_t *src,
                           unsigned int weak,
                           int success,
                           int failure);

#endif /* __DOXYGEN__ */

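/*
 * Usage sketch (illustrative, not part of the upstream header): a 128-bit
 * CAS lets a {value, tag} pair be swapped atomically, the classic
 * ABA-avoidance pattern. On failure the current contents are reloaded into
 * `old`, so the loop simply retries. Names are hypothetical.
 */
static inline void
example_tagged_store(rte_int128_t *slot, uint64_t new_bits)
{
        rte_int128_t old, upd;

        old = *slot; /* racy snapshot; corrected by the CAS loop below */
        do {
                upd.val[0] = new_bits;
                upd.val[1] = old.val[1] + 1; /* bump the version tag */
        } while (rte_atomic128_cmp_exchange(slot, &old, &upd, 0,
            rte_memory_order_seq_cst, rte_memory_order_seq_cst) == 0);
}
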
#endif /* _RTE_ATOMIC_H_ */