DPDK 23.07.0
rte_atomic.h
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

#include <stdint.h>
#include <rte_compat.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

static inline void rte_mb(void);

static inline void rte_wmb(void);

static inline void rte_rmb(void);

static inline void rte_smp_mb(void);

static inline void rte_smp_wmb(void);

static inline void rte_smp_rmb(void);

static inline void rte_io_mb(void);

static inline void rte_io_wmb(void);

static inline void rte_io_rmb(void);

#endif /* __DOXYGEN__ */

#define rte_compiler_barrier() do { \
	asm volatile ("" : : : "memory"); \
} while(0)

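rte_compiler_barrier() only prevents the compiler from reordering loads and stores across it; it emits no fence instruction. A minimal sketch of a case where that is enough, ordering within a single hardware thread (the surrounding code is illustrative, not part of this header):

static volatile int ready;
static int payload;

static void
produce(int value)
{
	payload = value;
	/* stop the compiler from sinking the payload store below the flag;
	 * no CPU fence is needed when the consumer (e.g. a signal handler)
	 * runs on the same core */
	rte_compiler_barrier();
	ready = 1;
}
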
static inline void rte_atomic_thread_fence(int memorder);

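rte_atomic_thread_fence() issues a synchronization fence between threads with the given memory order, one of the __ATOMIC_* constants. A hedged sketch of the classic fence-based publication pattern (names are illustrative):

struct msg { int a; int b; };
static struct msg shared;
static int flag;

static void
writer(void)
{
	shared.a = 1;
	shared.b = 2;
	rte_atomic_thread_fence(__ATOMIC_RELEASE);   /* order stores above the flag */
	__atomic_store_n(&flag, 1, __ATOMIC_RELAXED);
}

static void
reader(void)
{
	while (__atomic_load_n(&flag, __ATOMIC_RELAXED) == 0)
		;
	rte_atomic_thread_fence(__ATOMIC_ACQUIRE);   /* order loads below the flag */
	/* shared.a and shared.b are now guaranteed visible */
}
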
/*------------------------- 16 bit atomic operations -------------------------*/

static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

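rte_atomic16_cmpset() returns non-zero on success and 0 on failure, which makes it the building block for arbitrary read-modify-write loops. A minimal sketch (the saturating counter is hypothetical):

static inline void
saturating_inc16(volatile uint16_t *counter)
{
	uint16_t old;

	do {
		old = *counter;
		if (old == UINT16_MAX)
			return;        /* already saturated, nothing to do */
	} while (rte_atomic16_cmpset(counter, old, old + 1) == 0);
}
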
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

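rte_atomic16_exchange() atomically swaps in a new value and returns the old one, which is useful for draining state exactly once. A short sketch (the pending mask is hypothetical):

static volatile uint16_t pending;

static void
drain_pending(void)
{
	/* take all currently pending bits and clear them in one shot;
	 * concurrent setters are never lost, only deferred */
	uint16_t events = rte_atomic16_exchange(&pending, 0);

	if (events != 0) {
		/* ... handle the events observed at the swap ... */
	}
}
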
typedef struct {
	volatile int16_t cnt;
} rte_atomic16_t;

#define RTE_ATOMIC16_INIT(val) { (val) }

static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__atomic_fetch_add(&v->cnt, inc, __ATOMIC_SEQ_CST);
}

static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__atomic_fetch_sub(&v->cnt, dec, __ATOMIC_SEQ_CST);
}

static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __atomic_fetch_add(&v->cnt, inc, __ATOMIC_SEQ_CST) + inc;
}

static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __atomic_fetch_sub(&v->cnt, dec, __ATOMIC_SEQ_CST) - dec;
}

static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __atomic_fetch_add(&v->cnt, 1, __ATOMIC_SEQ_CST) + 1 == 0;
}
#endif

static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __atomic_fetch_sub(&v->cnt, 1, __ATOMIC_SEQ_CST) - 1 == 0;
}
#endif

static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

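A short usage sketch for the rte_atomic16_t counter API above (the variables are illustrative): test_and_set() works as a one-shot guard, and dec_and_test() identifies the final decrement:

static rte_atomic16_t once = RTE_ATOMIC16_INIT(0);
static rte_atomic16_t refs = RTE_ATOMIC16_INIT(1);

static void
example(void)
{
	if (rte_atomic16_test_and_set(&once)) {
		/* runs at most once across all threads */
	}

	rte_atomic16_inc(&refs);                 /* take a reference */
	if (rte_atomic16_dec_and_test(&refs)) {
		/* the last reference was just dropped */
	}
}
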
/*------------------------- 32 bit atomic operations -------------------------*/

static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

typedef struct {
	volatile int32_t cnt;
} rte_atomic32_t;

#define RTE_ATOMIC32_INIT(val) { (val) }

static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__atomic_fetch_add(&v->cnt, inc, __ATOMIC_SEQ_CST);
}

static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__atomic_fetch_sub(&v->cnt, dec, __ATOMIC_SEQ_CST);
}

static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __atomic_fetch_add(&v->cnt, inc, __ATOMIC_SEQ_CST) + inc;
}

static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __atomic_fetch_sub(&v->cnt, dec, __ATOMIC_SEQ_CST) - dec;
}

static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __atomic_fetch_add(&v->cnt, 1, __ATOMIC_SEQ_CST) + 1 == 0;
}
#endif

static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __atomic_fetch_sub(&v->cnt, 1, __ATOMIC_SEQ_CST) - 1 == 0;
}
#endif

static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}

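The 32-bit counter is a natural fit for reference counting. A hedged sketch, assuming a heap-allocated object (struct and functions are hypothetical; <stdlib.h> provides free()):

#include <stdlib.h>

struct obj {
	rte_atomic32_t refcnt;
	/* ... payload ... */
};

static void
obj_get(struct obj *o)
{
	rte_atomic32_inc(&o->refcnt);
}

static void
obj_put(struct obj *o)
{
	/* dec_and_test() returns true only for the thread that drops
	 * the last reference, so exactly one caller frees the object */
	if (rte_atomic32_dec_and_test(&o->refcnt))
		free(o);
}
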
/*------------------------- 64 bit atomic operations -------------------------*/

static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
}
#endif

typedef struct {
	volatile int64_t cnt;
} rte_atomic64_t;

#define RTE_ATOMIC64_INIT(val) { (val) }

static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, 0);
	}
#endif
}
#endif

static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, tmp);
	}
	return tmp;
#endif
}
#endif

static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
					      tmp, new_value);
	}
#endif
}
#endif

static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__atomic_fetch_add(&v->cnt, inc, __ATOMIC_SEQ_CST);
}
#endif

static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__atomic_fetch_sub(&v->cnt, dec, __ATOMIC_SEQ_CST);
}
#endif

static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __atomic_fetch_add(&v->cnt, inc, __ATOMIC_SEQ_CST) + inc;
}
#endif

static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __atomic_fetch_sub(&v->cnt, dec, __ATOMIC_SEQ_CST) - dec;
}
#endif

static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

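A usage sketch for the 64-bit counter (names are illustrative): a statistics counter updated from data-path threads and sampled from a control thread. rte_atomic64_read() stays atomic even on 32-bit targets, where a plain 64-bit load could tear; that is what the cmpset loop in the non-__LP64__ branch above is for.

static rte_atomic64_t rx_bytes = RTE_ATOMIC64_INIT(0);

static void
account_rx(int64_t nb_bytes)
{
	rte_atomic64_add(&rx_bytes, nb_bytes);
}

static int64_t
snapshot_rx(void)
{
	return rte_atomic64_read(&rx_bytes);   /* tear-free on 32-bit too */
}
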
/*------------------------ 128 bit atomic operations -------------------------*/

typedef struct {
	RTE_STD_C11
	union {
		uint64_t val[2];
#ifdef RTE_ARCH_64
		__extension__ __int128 int128;
#endif
	};
} __rte_aligned(16) rte_int128_t;

#ifdef __DOXYGEN__

__rte_experimental
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);

#endif /* __DOXYGEN__ */

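rte_atomic128_cmp_exchange() performs a 128-bit compare-and-exchange; on failure it refreshes *exp with the current contents of *dst, so a retry loop does not need to reload by hand. A sketch of updating a pointer together with an ABA tag in one shot (field layout and names are illustrative):

static rte_int128_t slot;

static void
slot_update(uint64_t new_ptr)
{
	rte_int128_t old, desired;

	/* a possibly torn initial read is fine: the first failed
	 * compare-and-exchange refreshes old with a consistent value */
	old.val[0] = slot.val[0];
	old.val[1] = slot.val[1];
	do {
		desired.val[0] = new_ptr;          /* the pointer */
		desired.val[1] = old.val[1] + 1;   /* bump the ABA tag */
	} while (rte_atomic128_cmp_exchange(&slot, &old, &desired,
					    0 /* strong */,
					    __ATOMIC_ACQ_REL,
					    __ATOMIC_RELAXED) == 0);
}
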
#endif /* _RTE_ATOMIC_H_ */