DPDK 20.11.3 — rte_atomic.h (generated documentation listing of this file)
1 /* SPDX-License-Identifier: BSD-3-Clause
2  * Copyright(c) 2010-2014 Intel Corporation
3  */
4 
5 #ifndef _RTE_ATOMIC_H_
6 #define _RTE_ATOMIC_H_
7 
15 #include <stdint.h>
16 #include <rte_common.h>
17 
#ifdef __DOXYGEN__

/** General memory barrier: orders both loads and stores. */
static inline void rte_mb(void);

/** Write memory barrier: orders store operations. */
static inline void rte_wmb(void);

/** Read memory barrier: orders load operations. */
static inline void rte_rmb(void);

/** General memory barrier between lcores (SMP scope). */
static inline void rte_smp_mb(void);

/** Write memory barrier between lcores (SMP scope). */
static inline void rte_smp_wmb(void);

/** Read memory barrier between lcores (SMP scope). */
static inline void rte_smp_rmb(void);

/** General memory barrier for I/O (device) memory accesses. */
static inline void rte_io_mb(void);

/** Write memory barrier for I/O (device) memory accesses. */
static inline void rte_io_wmb(void);

/** Read memory barrier for I/O (device) memory accesses. */
static inline void rte_io_rmb(void);

#endif /* __DOXYGEN__ */
111 
/**
 * Compiler barrier: forbids the compiler from reordering memory
 * accesses across this point. Emits no CPU barrier instruction.
 */
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)
121 
125 static inline void rte_atomic_thread_fence(int memorder);
126 
127 /*------------------------- 16 bit atomic operations -------------------------*/
128 
/**
 * Atomic 16-bit compare-and-set.
 *
 * If *dst == exp, atomically write src to *dst.
 *
 * @return Non-zero on success; 0 if *dst did not equal exp.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
155 
/**
 * Atomically exchange a 16-bit value.
 *
 * @return The previous value of *dst.
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif
185 
/**
 * 16-bit atomic counter.
 */
typedef struct {
	volatile int16_t cnt; /**< Internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }
197 
204 static inline void
206 {
207  v->cnt = 0;
208 }
209 
218 static inline int16_t
220 {
221  return v->cnt;
222 }
223 
232 static inline void
233 rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
234 {
235  v->cnt = new_value;
236 }
237 
246 static inline void
248 {
249  __sync_fetch_and_add(&v->cnt, inc);
250 }
251 
260 static inline void
262 {
263  __sync_fetch_and_sub(&v->cnt, dec);
264 }
265 
272 static inline void
274 
275 #ifdef RTE_FORCE_INTRINSICS
276 static inline void
278 {
279  rte_atomic16_add(v, 1);
280 }
281 #endif
282 
289 static inline void
291 
292 #ifdef RTE_FORCE_INTRINSICS
293 static inline void
295 {
296  rte_atomic16_sub(v, 1);
297 }
298 #endif
299 
313 static inline int16_t
315 {
316  return __sync_add_and_fetch(&v->cnt, inc);
317 }
318 
333 static inline int16_t
335 {
336  return __sync_sub_and_fetch(&v->cnt, dec);
337 }
338 
350 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);
351 
352 #ifdef RTE_FORCE_INTRINSICS
353 static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
354 {
355  return __sync_add_and_fetch(&v->cnt, 1) == 0;
356 }
357 #endif
358 
370 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);
371 
372 #ifdef RTE_FORCE_INTRINSICS
373 static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
374 {
375  return __sync_sub_and_fetch(&v->cnt, 1) == 0;
376 }
377 #endif
378 
390 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);
391 
392 #ifdef RTE_FORCE_INTRINSICS
393 static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
394 {
395  return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
396 }
397 #endif
398 
405 static inline void rte_atomic16_clear(rte_atomic16_t *v)
406 {
407  v->cnt = 0;
408 }
409 
410 /*------------------------- 32 bit atomic operations -------------------------*/
411 
/**
 * Atomic 32-bit compare-and-set.
 *
 * If *dst == exp, atomically write src to *dst.
 *
 * @return Non-zero on success; 0 if *dst did not equal exp.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
438 
/**
 * Atomically exchange a 32-bit value.
 *
 * @return The previous value of *dst.
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif
468 
/**
 * 32-bit atomic counter.
 */
typedef struct {
	volatile int32_t cnt; /**< Internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }
480 
487 static inline void
489 {
490  v->cnt = 0;
491 }
492 
501 static inline int32_t
503 {
504  return v->cnt;
505 }
506 
515 static inline void
516 rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
517 {
518  v->cnt = new_value;
519 }
520 
529 static inline void
531 {
532  __sync_fetch_and_add(&v->cnt, inc);
533 }
534 
543 static inline void
545 {
546  __sync_fetch_and_sub(&v->cnt, dec);
547 }
548 
555 static inline void
557 
558 #ifdef RTE_FORCE_INTRINSICS
559 static inline void
561 {
562  rte_atomic32_add(v, 1);
563 }
564 #endif
565 
572 static inline void
574 
575 #ifdef RTE_FORCE_INTRINSICS
576 static inline void
578 {
579  rte_atomic32_sub(v,1);
580 }
581 #endif
582 
596 static inline int32_t
598 {
599  return __sync_add_and_fetch(&v->cnt, inc);
600 }
601 
616 static inline int32_t
618 {
619  return __sync_sub_and_fetch(&v->cnt, dec);
620 }
621 
633 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);
634 
635 #ifdef RTE_FORCE_INTRINSICS
636 static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
637 {
638  return __sync_add_and_fetch(&v->cnt, 1) == 0;
639 }
640 #endif
641 
653 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);
654 
655 #ifdef RTE_FORCE_INTRINSICS
656 static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
657 {
658  return __sync_sub_and_fetch(&v->cnt, 1) == 0;
659 }
660 #endif
661 
673 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);
674 
675 #ifdef RTE_FORCE_INTRINSICS
676 static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
677 {
678  return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
679 }
680 #endif
681 
688 static inline void rte_atomic32_clear(rte_atomic32_t *v)
689 {
690  v->cnt = 0;
691 }
692 
693 /*------------------------- 64 bit atomic operations -------------------------*/
694 
/**
 * Atomic 64-bit compare-and-set.
 *
 * If *dst == exp, atomically write src to *dst.
 *
 * @return Non-zero on success; 0 if *dst did not equal exp.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
720 
/**
 * Atomically exchange a 64-bit value.
 *
 * @return The previous value of *dst.
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif
750 
/**
 * 64-bit atomic counter.
 */
typedef struct {
	volatile int64_t cnt; /**< Internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }
762 
769 static inline void
771 
772 #ifdef RTE_FORCE_INTRINSICS
773 static inline void
775 {
776 #ifdef __LP64__
777  v->cnt = 0;
778 #else
779  int success = 0;
780  uint64_t tmp;
781 
782  while (success == 0) {
783  tmp = v->cnt;
784  success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
785  tmp, 0);
786  }
787 #endif
788 }
789 #endif
790 
799 static inline int64_t
801 
802 #ifdef RTE_FORCE_INTRINSICS
803 static inline int64_t
805 {
806 #ifdef __LP64__
807  return v->cnt;
808 #else
809  int success = 0;
810  uint64_t tmp;
811 
812  while (success == 0) {
813  tmp = v->cnt;
814  /* replace the value by itself */
815  success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
816  tmp, tmp);
817  }
818  return tmp;
819 #endif
820 }
821 #endif
822 
831 static inline void
832 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);
833 
834 #ifdef RTE_FORCE_INTRINSICS
835 static inline void
836 rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
837 {
838 #ifdef __LP64__
839  v->cnt = new_value;
840 #else
841  int success = 0;
842  uint64_t tmp;
843 
844  while (success == 0) {
845  tmp = v->cnt;
846  success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
847  tmp, new_value);
848  }
849 #endif
850 }
851 #endif
852 
861 static inline void
862 rte_atomic64_add(rte_atomic64_t *v, int64_t inc);
863 
864 #ifdef RTE_FORCE_INTRINSICS
865 static inline void
866 rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
867 {
868  __sync_fetch_and_add(&v->cnt, inc);
869 }
870 #endif
871 
880 static inline void
881 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);
882 
883 #ifdef RTE_FORCE_INTRINSICS
884 static inline void
885 rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
886 {
887  __sync_fetch_and_sub(&v->cnt, dec);
888 }
889 #endif
890 
897 static inline void
899 
900 #ifdef RTE_FORCE_INTRINSICS
901 static inline void
903 {
904  rte_atomic64_add(v, 1);
905 }
906 #endif
907 
914 static inline void
916 
917 #ifdef RTE_FORCE_INTRINSICS
918 static inline void
920 {
921  rte_atomic64_sub(v, 1);
922 }
923 #endif
924 
938 static inline int64_t
939 rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);
940 
941 #ifdef RTE_FORCE_INTRINSICS
942 static inline int64_t
944 {
945  return __sync_add_and_fetch(&v->cnt, inc);
946 }
947 #endif
948 
962 static inline int64_t
963 rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);
964 
965 #ifdef RTE_FORCE_INTRINSICS
966 static inline int64_t
968 {
969  return __sync_sub_and_fetch(&v->cnt, dec);
970 }
971 #endif
972 
984 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);
985 
986 #ifdef RTE_FORCE_INTRINSICS
987 static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
988 {
989  return rte_atomic64_add_return(v, 1) == 0;
990 }
991 #endif
992 
1004 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);
1005 
1006 #ifdef RTE_FORCE_INTRINSICS
1007 static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
1008 {
1009  return rte_atomic64_sub_return(v, 1) == 0;
1010 }
1011 #endif
1012 
1024 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);
1025 
1026 #ifdef RTE_FORCE_INTRINSICS
1027 static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
1028 {
1029  return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
1030 }
1031 #endif
1032 
1039 static inline void rte_atomic64_clear(rte_atomic64_t *v);
1040 
1041 #ifdef RTE_FORCE_INTRINSICS
1042 static inline void rte_atomic64_clear(rte_atomic64_t *v)
1043 {
1044  rte_atomic64_set(v, 0);
1045 }
1046 #endif
1047 
1048 /*------------------------ 128 bit atomic operations -------------------------*/
1049 
1054 typedef struct {
1055  RTE_STD_C11
1056  union {
1057  uint64_t val[2];
1058 #ifdef RTE_ARCH_64
1059  __extension__ __int128 int128;
1060 #endif
1061  };
1062 } __rte_aligned(16) rte_int128_t;
1063 
#ifdef __DOXYGEN__

/**
 * 128-bit atomic compare-and-exchange (experimental API).
 *
 * @param dst
 *   Destination to update atomically.
 * @param exp
 *   Expected value; updated with the observed value on failure.
 * @param src
 *   Value to store on success.
 * @param weak
 *   Non-zero allows spurious failure (weak CAS).
 * @param success
 *   Memory order used on success (e.g. __ATOMIC_SEQ_CST).
 * @param failure
 *   Memory order used on failure.
 * @return
 *   Non-zero on success, 0 on failure.
 */
__rte_experimental
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
			   rte_int128_t *exp,
			   const rte_int128_t *src,
			   unsigned int weak,
			   int success,
			   int failure);

#endif /* __DOXYGEN__ */
1115 
1116 #endif /* _RTE_ATOMIC_H_ */
static void rte_atomic32_inc(rte_atomic32_t *v)
static int64_t rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
static void rte_smp_rmb(void)
static void rte_mb(void)
static int rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
static int16_t rte_atomic16_read(const rte_atomic16_t *v)
Definition: rte_atomic.h:219
static void rte_wmb(void)
volatile int16_t cnt
Definition: rte_atomic.h:190
static void rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
Definition: rte_atomic.h:530
static int rte_atomic16_dec_and_test(rte_atomic16_t *v)
static int64_t rte_atomic64_read(rte_atomic64_t *v)
static int16_t rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
Definition: rte_atomic.h:334
static void rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
Definition: rte_atomic.h:544
static void rte_atomic32_dec(rte_atomic32_t *v)
static int rte_atomic32_inc_and_test(rte_atomic32_t *v)
static void rte_io_mb(void)
static void rte_atomic64_inc(rte_atomic64_t *v)
static void rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
Definition: rte_atomic.h:516
volatile int32_t cnt
Definition: rte_atomic.h:473
static int32_t rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
Definition: rte_atomic.h:597
static void rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
Definition: rte_atomic.h:247
static void rte_atomic32_init(rte_atomic32_t *v)
Definition: rte_atomic.h:488
static void rte_atomic16_clear(rte_atomic16_t *v)
Definition: rte_atomic.h:405
volatile int64_t cnt
Definition: rte_atomic.h:755
static int rte_atomic16_test_and_set(rte_atomic16_t *v)
static int rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
static void rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
Definition: rte_atomic.h:261
static void rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
Definition: rte_atomic.h:233
static void rte_io_wmb(void)
static void rte_atomic64_clear(rte_atomic64_t *v)
static int16_t rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
Definition: rte_atomic.h:314
static int rte_atomic64_dec_and_test(rte_atomic64_t *v)
static void rte_smp_wmb(void)
static int64_t rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
static void rte_rmb(void)
static void rte_atomic32_clear(rte_atomic32_t *v)
Definition: rte_atomic.h:688
static void rte_atomic16_inc(rte_atomic16_t *v)
static int rte_atomic16_inc_and_test(rte_atomic16_t *v)
static void rte_atomic_thread_fence(int memorder)
static int rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
#define RTE_STD_C11
Definition: rte_common.h:40
static int rte_atomic64_inc_and_test(rte_atomic64_t *v)
static int32_t rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
Definition: rte_atomic.h:617
static void rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
static uint16_t rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
static int rte_atomic32_test_and_set(rte_atomic32_t *v)
static void rte_smp_mb(void)
rte_iova_t buf_iova __rte_aligned(sizeof(rte_iova_t))
static uint32_t rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
static void rte_atomic16_dec(rte_atomic16_t *v)
static void rte_atomic64_init(rte_atomic64_t *v)
static void rte_atomic64_dec(rte_atomic64_t *v)
static void rte_atomic16_init(rte_atomic16_t *v)
Definition: rte_atomic.h:205
static int rte_atomic64_test_and_set(rte_atomic64_t *v)
static uint64_t rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
static int32_t rte_atomic32_read(const rte_atomic32_t *v)
Definition: rte_atomic.h:502
static __rte_experimental int rte_atomic128_cmp_exchange(rte_int128_t *dst, rte_int128_t *exp, const rte_int128_t *src, unsigned int weak, int success, int failure)
static void rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
static int rte_atomic32_dec_and_test(rte_atomic32_t *v)
static void rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
static void rte_io_rmb(void)