DPDK 22.11.0
rte_atomic.h
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>
#include <rte_compat.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/** General memory barrier: orders prior LOADs/STOREs before later ones. */
static inline void rte_mb(void);

/** Write memory barrier: orders prior STOREs before later STOREs. */
static inline void rte_wmb(void);

/** Read memory barrier: orders prior LOADs before later LOADs. */
static inline void rte_rmb(void);

/** General memory barrier between lcores. */
static inline void rte_smp_mb(void);

/** Write memory barrier between lcores. */
static inline void rte_smp_wmb(void);

/** Read memory barrier between lcores. */
static inline void rte_smp_rmb(void);

/** General memory barrier for I/O device access. */
static inline void rte_io_mb(void);

/** Write memory barrier for I/O device access. */
static inline void rte_io_wmb(void);

/** Read memory barrier for I/O device access. */
static inline void rte_io_rmb(void);

#endif /* __DOXYGEN__ */
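
/*
 * Illustrative usage sketch, not part of this header: single-producer,
 * single-consumer message passing between lcores using the SMP barriers.
 * All "example_*" names are hypothetical; the barrier implementations
 * come from the architecture-specific headers.
 */
static volatile uint32_t example_msg_data;
static volatile uint32_t example_msg_ready;

static inline void
example_publish(uint32_t data)
{
        example_msg_data = data;
        rte_smp_wmb();  /* payload must be visible before the ready flag */
        example_msg_ready = 1;
}

static inline int
example_consume(uint32_t *data)
{
        if (example_msg_ready == 0)
                return 0;
        rte_smp_rmb();  /* read the payload only after seeing the flag */
        *data = example_msg_data;
        return 1;
}
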
/** Compiler barrier: prevents compile-time reordering across it. */
#define rte_compiler_barrier() do {             \
        asm volatile ("" : : : "memory");       \
} while (0)

/** Thread fence with the given C11 memory order (__ATOMIC_*). */
static inline void rte_atomic_thread_fence(int memorder);

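/*
 * Illustrative sketch, not part of this header: rte_atomic_thread_fence()
 * takes a C11 __ATOMIC_* constant and orders both the compiler and the
 * CPU, while rte_compiler_barrier() constrains the compiler only. The
 * "example_*" names are hypothetical.
 */
static inline void
example_publish_with_fence(volatile uint32_t *payload, volatile uint32_t *flag)
{
        *payload = 42;
        rte_atomic_thread_fence(__ATOMIC_RELEASE); /* release the payload */
        *flag = 1;
}
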
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set: if (*dst == exp) *dst = src.
 * Returns non-zero on success, 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

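/*
 * Illustrative sketch, not part of this header: a saturating 16-bit
 * increment built as a classic compare-and-set retry loop. The
 * "example_*" name is hypothetical.
 */
static inline uint16_t
example_saturating_inc16(volatile uint16_t *cnt, uint16_t max)
{
        uint16_t old, new_val;

        do {
                old = *cnt;
                if (old == max)
                        return old;     /* already saturated, nothing to do */
                new_val = old + 1;
        } while (rte_atomic16_cmpset(cnt, old, new_val) == 0);

        return new_val;
}
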
/**
 * Atomic exchange: store val into *dst and return the previous value.
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
#if defined(__clang__)
        return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
        return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

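/*
 * Illustrative sketch, not part of this header: atomically read and
 * reset a 16-bit statistic in one step with an exchange, so no update
 * is lost between the read and the reset. "example_*" is hypothetical.
 */
static inline uint16_t
example_drain_stat16(volatile uint16_t *stat)
{
        return rte_atomic16_exchange(stat, 0);
}
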
/** The atomic counter structure. */
typedef struct {
        volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/** Static initializer for an atomic counter. */
#define RTE_ATOMIC16_INIT(val) { (val) }

/** Initialize an atomic counter. */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
        v->cnt = 0;
}

/** Atomically read a 16-bit counter. */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
        return v->cnt;
}

/** Atomically set a counter to a 16-bit value. */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
        v->cnt = new_value;
}

/** Atomically add a 16-bit value to an atomic counter. */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}

/** Atomically subtract a 16-bit value from an atomic counter. */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}

/** Atomically increment a counter by one. */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
        rte_atomic16_add(v, 1);
}
#endif

/** Atomically decrement a counter by one. */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
        rte_atomic16_sub(v, 1);
}
#endif

/** Atomically add a 16-bit value to a counter and return the result. */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}

/** Atomically subtract a 16-bit value from a counter and return the result. */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}

/** Increment a 16-bit counter by one; return true if the result is 0. */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
        return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/** Decrement a 16-bit counter by one; return true if the result is 0. */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
        return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 16-bit counter: if it is 0, set it to 1 and
 * return non-zero (success); otherwise return 0 (failure).
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
        return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

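/*
 * Illustrative sketch, not part of this header: run one-time setup
 * exactly once across lcores with the test-and-set flag; only the first
 * caller sees a non-zero return. "example_*" names are hypothetical.
 */
static rte_atomic16_t example_init_flag = RTE_ATOMIC16_INIT(0);

static inline void
example_init_once(void (*do_init)(void))
{
        if (rte_atomic16_test_and_set(&example_init_flag))
                do_init();      /* we won the race: perform the setup */
}
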
/** Atomically set a 16-bit counter to 0. */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
        v->cnt = 0;
}

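/*
 * Illustrative sketch, not part of this header: typical lifecycle of an
 * rte_atomic16_t counter used as a worker reference count. All
 * "example_*" names are hypothetical.
 */
static rte_atomic16_t example_worker_refs = RTE_ATOMIC16_INIT(0);

static inline void
example_worker_start(void)
{
        rte_atomic16_inc(&example_worker_refs);
}

static inline int
example_worker_stop(void)
{
        /* non-zero when the last worker has just stopped */
        return rte_atomic16_dec_and_test(&example_worker_refs);
}
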
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set: if (*dst == exp) *dst = src.
 * Returns non-zero on success, 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange: store val into *dst and return the previous value.
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
#if defined(__clang__)
        return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
        return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/** The atomic counter structure. */
typedef struct {
        volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/** Static initializer for an atomic counter. */
#define RTE_ATOMIC32_INIT(val) { (val) }

/** Initialize an atomic counter. */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
        v->cnt = 0;
}

/** Atomically read a 32-bit counter. */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
        return v->cnt;
}

/** Atomically set a counter to a 32-bit value. */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
        v->cnt = new_value;
}

/** Atomically add a 32-bit value to an atomic counter. */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}

/** Atomically subtract a 32-bit value from an atomic counter. */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}

/** Atomically increment a counter by one. */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
        rte_atomic32_add(v, 1);
}
#endif

/** Atomically decrement a counter by one. */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
        rte_atomic32_sub(v, 1);
}
#endif

/** Atomically add a 32-bit value to a counter and return the result. */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}

/** Atomically subtract a 32-bit value from a counter and return the result. */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}

/** Increment a 32-bit counter by one; return true if the result is 0. */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
        return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/** Decrement a 32-bit counter by one; return true if the result is 0. */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
        return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 32-bit counter: if it is 0, set it to 1 and
 * return non-zero (success); otherwise return 0 (failure).
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
        return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/** Atomically set a 32-bit counter to 0. */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
        v->cnt = 0;
}

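/*
 * Illustrative sketch, not part of this header: releasing a
 * reference-counted object, in the style common across DPDK. The
 * "example_*" names are hypothetical.
 */
struct example_obj {
        rte_atomic32_t refcnt;
};

static inline void
example_obj_put(struct example_obj *obj,
                void (*obj_free)(struct example_obj *))
{
        /* free only when the last reference is dropped */
        if (rte_atomic32_dec_and_test(&obj->refcnt))
                obj_free(obj);
}
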
/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * Atomic compare and set: if (*dst == exp) *dst = src.
 * Returns non-zero on success, 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange: store val into *dst and return the previous value.
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
#if defined(__clang__)
        return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
        return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/** The atomic counter structure. */
typedef struct {
        volatile int64_t cnt; /**< An internal counter value. */
} rte_atomic64_t;

/** Static initializer for an atomic counter. */
#define RTE_ATOMIC64_INIT(val) { (val) }

/** Initialize the atomic counter. */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
        v->cnt = 0;
#else
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, 0);
        }
#endif
}
#endif

/** Atomically read a 64-bit counter. */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
        return v->cnt;
#else
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                /* replace the value by itself */
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, tmp);
        }
        return tmp;
#endif
}
#endif

/** Atomically set a 64-bit counter. */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
        v->cnt = new_value;
#else
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, new_value);
        }
#endif
}
#endif

/** Atomically add a 64-bit value to a counter. */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}
#endif

/** Atomically subtract a 64-bit value from a counter. */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/** Atomically increment a counter by one. */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
        rte_atomic64_add(v, 1);
}
#endif

/** Atomically decrement a counter by one. */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
        rte_atomic64_sub(v, 1);
}
#endif

/** Atomically add a 64-bit value to a counter and return the result. */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/** Atomically subtract a 64-bit value from a counter and return the result. */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/** Increment a 64-bit counter by one; return true if the result is 0. */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/** Decrement a 64-bit counter by one; return true if the result is 0. */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit counter: if it is 0, set it to 1 and
 * return non-zero (success); otherwise return 0 (failure).
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
        return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/** Atomically set a 64-bit counter to 0. */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
        rte_atomic64_set(v, 0);
}
#endif

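/*
 * Illustrative sketch, not part of this header: a 64-bit byte counter
 * shared across lcores. On 32-bit builds rte_atomic64_read() goes
 * through the compare-and-set loop above, so readers never observe a
 * torn value. The "example_*" names are hypothetical.
 */
static rte_atomic64_t example_port_bytes = RTE_ATOMIC64_INIT(0);

static inline void
example_account_rx(uint32_t pkt_len)
{
        rte_atomic64_add(&example_port_bytes, pkt_len);
}

static inline int64_t
example_read_bytes(void)
{
        return rte_atomic64_read(&example_port_bytes);
}
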
/*------------------------ 128 bit atomic operations -------------------------*/

/** 128-bit integer structure. */
typedef struct {
        RTE_STD_C11
        union {
                uint64_t val[2];
#ifdef RTE_ARCH_64
                __extension__ __int128 int128;
#endif
        };
} __rte_aligned(16) rte_int128_t;

#ifdef __DOXYGEN__

/**
 * 128-bit atomic compare and exchange; atomically equivalent to:
 * if (*dst == *exp) *dst = *src; else *exp = *dst.
 *
 * Currently available on the x86-64 and aarch64 platforms only. A
 * non-zero 'weak' allows the comparison to spuriously fail; 'success'
 * and 'failure' are __ATOMIC_* memory-order constants, where 'failure'
 * must be no stronger than 'success' and cannot be a release order.
 * Returns non-zero on success, 0 on failure (in which case *exp is
 * updated with the current value).
 */
__rte_experimental
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
                           rte_int128_t *exp,
                           const rte_int128_t *src,
                           unsigned int weak,
                           int success,
                           int failure);

#endif /* __DOXYGEN__ */

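/*
 * Illustrative sketch, not part of this header and 64-bit targets only:
 * updating a pointer together with an ABA-avoidance tag in one 128-bit
 * strong compare-exchange. The function is __rte_experimental, so
 * callers must build with ALLOW_EXPERIMENTAL_API; the "example_*" names
 * are hypothetical.
 */
#ifdef RTE_ARCH_64
static inline void
example_update_pair(rte_int128_t *slot, uint64_t new_ptr)
{
        rte_int128_t expected, desired;

        expected = *slot;       /* racy snapshot; the CAS validates it */
        do {
                desired.val[0] = new_ptr;
                desired.val[1] = expected.val[1] + 1;   /* bump the tag */
        } while (rte_atomic128_cmp_exchange(slot, &expected, &desired,
                        0 /* strong */, __ATOMIC_ACQ_REL,
                        __ATOMIC_RELAXED) == 0);
}
#endif
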
#endif /* _RTE_ATOMIC_H_ */