DPDK 24.11.0-rc3
rte_atomic.h
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

#include <stdint.h>

#include <rte_common.h>
#include <rte_stdatomic.h>

#ifdef __cplusplus
extern "C" {
#endif

#ifdef __DOXYGEN__

/*
 * General memory barrier: loads and stores issued before the barrier are
 * globally visible before any load or store issued after it.
 */
static inline void rte_mb(void);

/*
 * Write memory barrier: stores issued before the barrier are globally
 * visible before stores issued after it.
 */
static inline void rte_wmb(void);

/*
 * Read memory barrier: loads issued before the barrier are globally
 * visible before loads issued after it.
 */
static inline void rte_rmb(void);

/*
 * SMP variants: order memory accesses only with respect to other lcores,
 * which can be cheaper than the full barriers above on some architectures.
 */
static inline void rte_smp_mb(void);

static inline void rte_smp_wmb(void);

static inline void rte_smp_rmb(void);

/*
 * I/O variants: order memory accesses with respect to I/O (device) memory.
 */
static inline void rte_io_mb(void);

static inline void rte_io_wmb(void);

static inline void rte_io_rmb(void);

#endif /* __DOXYGEN__ */
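
/*
 * A minimal producer/consumer sketch of the SMP barriers; `data`, `ready`
 * and use() are hypothetical:
 *
 *   static int data;
 *   static volatile int ready;
 *
 *   // producer (lcore A)
 *   data = 42;
 *   rte_smp_wmb();          // publish data before the ready flag
 *   ready = 1;
 *
 *   // consumer (lcore B)
 *   while (!ready)
 *       ;
 *   rte_smp_rmb();          // read the flag before reading data
 *   use(data);
 */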

#ifdef RTE_TOOLCHAIN_MSVC
#define rte_compiler_barrier() _ReadWriteBarrier()
#else
#define rte_compiler_barrier() do { \
	asm volatile ("" : : : "memory"); \
} while(0)
#endif

/* Thread fence with the given C11-style memory ordering. */
static inline void rte_atomic_thread_fence(rte_memory_order memorder);

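/*
 * A hedged sketch of pairing release/acquire fences around a flag (names
 * are hypothetical; in strict C11 the flag itself should also be atomic):
 *
 *   payload = compute();                                  // writer
 *   rte_atomic_thread_fence(rte_memory_order_release);
 *   flag = 1;
 *
 *   while (!flag)                                         // reader
 *       ;
 *   rte_atomic_thread_fence(rte_memory_order_acquire);
 *   consume(payload);
 */
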
/*------------------------- 16 bit atomic operations -------------------------*/

#ifndef RTE_TOOLCHAIN_MSVC

/*
 * Atomic compare-and-set: if *dst equals exp, write src to *dst.
 * Returns non-zero on success, 0 otherwise.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
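
/*
 * A typical compare-and-set retry loop, sketched with a hypothetical
 * inc_below_limit() helper that increments a counter only while it is
 * under a ceiling:
 *
 *   static inline int
 *   inc_below_limit(volatile uint16_t *cnt, uint16_t limit)
 *   {
 *       uint16_t old;
 *       do {
 *           old = *cnt;
 *           if (old >= limit)
 *               return 0;                      // ceiling reached, give up
 *       } while (!rte_atomic16_cmpset(cnt, old, old + 1));
 *       return 1;                              // incremented atomically
 *   }
 */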

/*
 * Atomically exchange *dst with val; returns the original value of *dst.
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	return rte_atomic_exchange_explicit((volatile __rte_atomic uint16_t *)dst, val,
		rte_memory_order_seq_cst);
}
#endif

/* The atomic counter structure. */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC16_INIT(val) { (val) }

/* Initialize the atomic counter to 0. */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/* Atomically read the counter. */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/* Atomically set the counter to new_value. */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/* Atomically add inc to the counter. */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
		rte_memory_order_seq_cst);
}

/* Atomically subtract dec from the counter. */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
		rte_memory_order_seq_cst);
}

/* Atomically increment the counter by one. */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/* Atomically decrement the counter by one. */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/* Atomically add inc to the counter and return the new value. */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
		rte_memory_order_seq_cst) + inc;
}

/* Atomically subtract dec from the counter and return the new value. */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
		rte_memory_order_seq_cst) - dec;
}

/* Atomically increment the counter; return true if the result is 0. */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) + 1 == 0;
}
#endif

/* Atomically decrement the counter; return true if the result is 0. */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) - 1 == 0;
}
#endif

/* Atomically set the flag to 1 if it was 0; return true on success. */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/* Set the counter back to 0. */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

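/*
 * A small sketch of the test-and-set/clear pairing as a busy-wait flag
 * (real code should prefer rte_spinlock_t; this is illustrative only):
 *
 *   static rte_atomic16_t flag = RTE_ATOMIC16_INIT(0);
 *
 *   while (!rte_atomic16_test_and_set(&flag))
 *       ;                              // spin until we flip 0 -> 1
 *   // ... critical section ...
 *   rte_atomic16_clear(&flag);         // release the flag
 */
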
/*------------------------- 32 bit atomic operations -------------------------*/

/*
 * Atomic compare-and-set: if *dst equals exp, write src to *dst.
 * Returns non-zero on success, 0 otherwise.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/*
 * Atomically exchange *dst with val; returns the original value of *dst.
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	return rte_atomic_exchange_explicit((volatile __rte_atomic uint32_t *)dst, val,
		rte_memory_order_seq_cst);
}
#endif

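/*
 * A sketch of exchange as "take and reset": atomically consume a pending
 * event mask; `pending` and handle() are hypothetical:
 *
 *   static volatile uint32_t pending;
 *
 *   uint32_t events = rte_atomic32_exchange(&pending, 0);
 *   if (events != 0)
 *       handle(events);            // we own exactly the bits swapped out
 */
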
/* The atomic counter structure. */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC32_INIT(val) { (val) }

/* Initialize the atomic counter to 0. */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/* Atomically read the counter. */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/* Atomically set the counter to new_value. */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/* Atomically add inc to the counter. */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
		rte_memory_order_seq_cst);
}

/* Atomically subtract dec from the counter. */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
		rte_memory_order_seq_cst);
}

/* Atomically increment the counter by one. */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/* Atomically decrement the counter by one. */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/* Atomically add inc to the counter and return the new value. */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
		rte_memory_order_seq_cst) + inc;
}

/* Atomically subtract dec from the counter and return the new value. */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
		rte_memory_order_seq_cst) - dec;
}

/* Atomically increment the counter; return true if the result is 0. */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) + 1 == 0;
}
#endif

/* Atomically decrement the counter; return true if the result is 0. */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
		rte_memory_order_seq_cst) - 1 == 0;
}
#endif

/* Atomically set the flag to 1 if it was 0; return true on success. */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/* Set the counter back to 0. */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}
738 
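/*
 * A reference-counting sketch with the 32-bit counter; struct obj and
 * obj_free() are hypothetical:
 *
 *   struct obj { rte_atomic32_t refcnt; };
 *
 *   static void obj_get(struct obj *o) { rte_atomic32_inc(&o->refcnt); }
 *
 *   static void obj_put(struct obj *o)
 *   {
 *       if (rte_atomic32_dec_and_test(&o->refcnt))
 *           obj_free(o);               // last reference dropped
 *   }
 */
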
/*------------------------- 64 bit atomic operations -------------------------*/

/*
 * Atomic compare-and-set: if *dst equals exp, write src to *dst.
 * Returns non-zero on success, 0 otherwise.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/*
 * Atomically exchange *dst with val; returns the original value of *dst.
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	return rte_atomic_exchange_explicit((volatile __rte_atomic uint64_t *)dst, val,
		rte_memory_order_seq_cst);
}
#endif

/* The atomic counter structure. */
typedef struct {
	volatile int64_t cnt; /**< An internal counter value. */
} rte_atomic64_t;

/* Static initializer for an atomic counter. */
#define RTE_ATOMIC64_INIT(val) { (val) }

/* Initialize the atomic counter to 0. */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	/* On 32-bit targets a plain 64-bit store is not atomic, so emulate
	 * the store with a compare-and-set retry loop. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
			tmp, 0);
	}
#endif
}
#endif

/* Atomically read the counter. */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	/* On 32-bit targets a plain 64-bit load is not atomic; a successful
	 * compare-and-set of the value with itself yields a consistent copy. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
			tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/* Atomically set the counter to new_value. */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
			tmp, new_value);
	}
#endif
}
#endif

/* Atomically add inc to the counter. */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
		rte_memory_order_seq_cst);
}
#endif

/* Atomically subtract dec from the counter. */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
		rte_memory_order_seq_cst);
}
#endif

/* Atomically increment the counter by one. */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/* Atomically decrement the counter by one. */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/* Atomically add inc to the counter and return the new value. */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
		rte_memory_order_seq_cst) + inc;
}
#endif

/* Atomically subtract dec from the counter and return the new value. */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
		rte_memory_order_seq_cst) - dec;
}
#endif

/* Atomically increment the counter; return true if the result is 0. */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/* Atomically decrement the counter; return true if the result is 0. */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/* Atomically set the flag to 1 if it was 0; return true on success. */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/* Set the counter back to 0. */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

#endif /* !RTE_TOOLCHAIN_MSVC */

/*------------------------ 128 bit atomic operations -------------------------*/

/* 128-bit integer, 16-byte aligned for atomic access. */
typedef struct __rte_aligned(16) {
	union {
		uint64_t val[2];
#ifdef RTE_ARCH_64
#ifndef RTE_TOOLCHAIN_MSVC
		__extension__ __int128 int128;
#endif
#endif
	};
} rte_int128_t;

#ifdef __DOXYGEN__

/*
 * Atomic 128-bit compare-exchange (64-bit architectures only). Compares
 * *dst with *exp; on match writes *src to *dst, otherwise copies *dst into
 * *exp. weak permits spurious failure; success/failure are the memory
 * orderings used in each case. Returns non-zero on success.
 */
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);

#endif /* __DOXYGEN__ */
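
/*
 * A sketch of a 128-bit compare-exchange retry loop, e.g. updating a
 * {value, tag} pair in one shot to sidestep ABA; `head` is hypothetical:
 *
 *   rte_int128_t old, new;
 *
 *   old = *head;                           // snapshot; exp is refreshed on
 *   do {                                   // failure, so no re-read needed
 *       new.val[0] = old.val[0] + 1;       // e.g. bump the value
 *       new.val[1] = old.val[1] + 1;       // and the tag together
 *   } while (!rte_atomic128_cmp_exchange(head, &old, &new, 1,
 *           rte_memory_order_acquire, rte_memory_order_relaxed));
 */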

#ifdef __cplusplus
}
#endif

#endif /* _RTE_ATOMIC_H_ */