DPDK  19.02.0
rte_atomic.h
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

#include <stdint.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
static inline void rte_rmb(void);

/** General memory barrier between lcores. */
static inline void rte_smp_mb(void);

/** Write memory barrier between lcores. */
static inline void rte_smp_wmb(void);

/** Read memory barrier between lcores. */
static inline void rte_smp_rmb(void);

/** General memory barrier for I/O device access. */
static inline void rte_io_mb(void);

/** Write memory barrier for I/O device access. */
static inline void rte_io_wmb(void);

/** Read memory barrier for I/O device access. */
static inline void rte_io_rmb(void);

/** Write memory barrier for coherent (DMA-capable) I/O memory access. */
static inline void rte_cio_wmb(void);

/** Read memory barrier for coherent (DMA-capable) I/O memory access. */
static inline void rte_cio_rmb(void);

#endif /* __DOXYGEN__ */

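/*
 * Illustrative sketch (not part of this header): a minimal single-
 * producer/single-consumer handoff built from the lcore barriers
 * declared above. The data/ready variables and the two functions are
 * hypothetical.
 *
 * @code
 * static uint32_t data;
 * static volatile int ready;
 *
 * static void producer(void)
 * {
 *	data = 42;
 *	rte_smp_wmb();           // publish data before the ready flag
 *	ready = 1;
 * }
 *
 * static void consumer(void)
 * {
 *	while (ready == 0)
 *		;                // spin until the flag is observed
 *	rte_smp_rmb();           // order the flag load before the data load
 *	uint32_t value = data;   // guaranteed to observe 42
 *	(void)value;
 * }
 * @endcode
 */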
/**
 * Compiler barrier.
 *
 * Guarantees that operation reordering does not occur at compile time
 * for operations directly before and after the barrier.
 */
#define rte_compiler_barrier() do {		\
	asm volatile ("" : : : "memory");	\
} while(0)

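/*
 * Illustrative sketch (not part of this header): rte_compiler_barrier()
 * emits no CPU instruction; it only stops the compiler from reordering
 * memory accesses across it. The descriptor fields below are
 * hypothetical; on weakly-ordered CPUs a hardware barrier such as
 * rte_wmb() would be needed instead.
 *
 * @code
 * desc->len = len;         // fill the descriptor first
 * rte_compiler_barrier();  // keep the compiler from sinking this store
 * desc->flags = 1;         // then mark it valid
 * @endcode
 */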
/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @return Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

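/*
 * Illustrative sketch (not part of this header): the usual retry loop
 * around cmpset, applying an arbitrary read-modify-write (here a
 * saturating increment; MAX_TOKENS and the tokens variable are
 * hypothetical).
 *
 * @code
 * static volatile uint16_t tokens;
 *
 * static void add_token_saturating(void)
 * {
 *	uint16_t oldv, newv;
 *
 *	do {
 *		oldv = tokens;
 *		newv = (oldv == MAX_TOKENS) ? oldv : (uint16_t)(oldv + 1);
 *	} while (!rte_atomic16_cmpset(&tokens, oldv, newv));
 * }
 * @endcode
 */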
/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst;
 *   *dst = val;
 *   return ret;
 *
 * @return The original value at that location.
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_2(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

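/*
 * Illustrative sketch (not part of this header): since exchange returns
 * the previous value, it can atomically "take" a pending event mask in
 * a single step (pending_events is hypothetical).
 *
 * @code
 * static volatile uint16_t pending_events;
 *
 * static uint16_t take_pending_events(void)
 * {
 *	return rte_atomic16_exchange(&pending_events, 0);
 * }
 * @endcode
 */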
/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

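/*
 * Illustrative sketch (not part of this header): the macro covers
 * static/compile-time initialization; rte_atomic16_init() below covers
 * the run-time case.
 *
 * @code
 * static rte_atomic16_t nb_workers = RTE_ATOMIC16_INIT(0);
 * @endcode
 */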
/**
 * Initialize an atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 16-bit value from a counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 16-bit value.
 */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 16-bit value to an atomic counter.
 */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from an atomic counter.
 */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
	rte_atomic16_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
	rte_atomic16_sub(v, 1);
}
#endif

/**
 * Atomically add a 16-bit value to a counter and return the result.
 */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 16-bit value from a counter and return the result.
 */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

/**
 * Atomically increment a 16-bit counter by one and test.
 *
 * @return True (non-zero) if the result is 0; false otherwise.
 */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 16-bit counter by one and test.
 *
 * @return True (non-zero) if the result is 0; false otherwise.
 */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

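/*
 * Illustrative sketch (not part of this header): the classic
 * reference-count release pattern; exactly one caller observes the
 * counter reaching zero and frees the object. struct obj and obj_put()
 * are hypothetical; free() is from <stdlib.h>.
 *
 * @code
 * struct obj {
 *	rte_atomic16_t refcnt;
 *	// ... payload ...
 * };
 *
 * static void obj_put(struct obj *o)
 * {
 *	if (rte_atomic16_dec_and_test(&o->refcnt))
 *		free(o); // last reference dropped
 * }
 * @endcode
 */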
/**
 * Atomically test and set a 16-bit atomic counter.
 *
 * If the counter is already set (non-zero), return 0 (failure);
 * otherwise set it to 1 and return 1 (success).
 */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
	return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 16-bit counter to 0.
 */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
	v->cnt = 0;
}

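/*
 * Illustrative sketch (not part of this header): test-and-set plus
 * clear form a simple "run once until reset" gate; only the caller
 * that flips the counter from 0 to 1 performs the work.
 * do_one_time_init() is hypothetical.
 *
 * @code
 * static rte_atomic16_t once = RTE_ATOMIC16_INIT(0);
 *
 * static void maybe_init(void)
 * {
 *	if (rte_atomic16_test_and_set(&once))
 *		do_one_time_init(); // first caller only
 * }
 *
 * static void reset_gate(void)
 * {
 *	rte_atomic16_clear(&once); // allow the next maybe_init() to run
 * }
 * @endcode
 */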
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @return Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst;
 *   *dst = val;
 *   return ret;
 *
 * @return The original value at that location.
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_4(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Atomically read a 32-bit value from a counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}

/**
 * Atomically set a counter to a 32-bit value.
 */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
	v->cnt = new_value;
}

/**
 * Atomically add a 32-bit value to an atomic counter.
 */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from an atomic counter.
 */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}

/**
 * Atomically increment a counter by one.
 */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
	rte_atomic32_add(v, 1);
}
#endif

/**
 * Atomically decrement a counter by one.
 */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
	rte_atomic32_sub(v, 1);
}
#endif

/**
 * Atomically add a 32-bit value to a counter and return the result.
 */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}

/**
 * Atomically subtract a 32-bit value from a counter and return the result.
 */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}

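/*
 * Illustrative sketch (not part of this header): because add_return
 * yields the post-add value atomically, it can hand out unique sequence
 * numbers to concurrent callers (next_id and alloc_id() are
 * hypothetical).
 *
 * @code
 * static rte_atomic32_t next_id = RTE_ATOMIC32_INIT(0);
 *
 * static int32_t alloc_id(void)
 * {
 *	return rte_atomic32_add_return(&next_id, 1); // 1, 2, 3, ...
 * }
 * @endcode
 */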
/**
 * Atomically increment a 32-bit counter by one and test.
 *
 * @return True (non-zero) if the result is 0; false otherwise.
 */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
	return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically decrement a 32-bit counter by one and test.
 *
 * @return True (non-zero) if the result is 0; false otherwise.
 */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
	return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/**
 * Atomically test and set a 32-bit atomic counter.
 *
 * If the counter is already set (non-zero), return 0 (failure);
 * otherwise set it to 1 and return 1 (success).
 */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
	return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 32-bit counter to 0.
 */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @return Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * Atomic exchange.
 *
 * (atomic) equivalent to:
 *   ret = *dst;
 *   *dst = val;
 *   return ret;
 *
 * @return The original value at that location.
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
#if defined(__clang__)
	return __atomic_exchange_n(dst, val, __ATOMIC_SEQ_CST);
#else
	return __atomic_exchange_8(dst, val, __ATOMIC_SEQ_CST);
#endif
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
	volatile int64_t cnt; /**< An internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	int success = 0;
	uint64_t tmp;

	/* a plain 64-bit store is not atomic on 32-bit targets, so
	 * emulate it with a compare-and-set loop */
	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/**
 * Atomically read a 64-bit counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself: a successful cmpset proves
		 * that tmp was a consistent (tear-free) 64-bit snapshot */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif

/**
 * Atomically set a 64-bit counter.
 */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
	v->cnt = new_value;
#else
	int success = 0;
	uint64_t tmp;

	/* emulate the 64-bit store with a compare-and-set loop */
	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, new_value);
	}
#endif
}
#endif

/**
 * Atomically add a 64-bit value to a counter.
 */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
	__sync_fetch_and_add(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter.
 */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
	__sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/**
 * Atomically increment a 64-bit counter by one.
 */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
	rte_atomic64_add(v, 1);
}
#endif

/**
 * Atomically decrement a 64-bit counter by one.
 */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
	rte_atomic64_sub(v, 1);
}
#endif

/**
 * Atomically add a 64-bit value to a counter and return the result.
 */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
	return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/**
 * Atomically subtract a 64-bit value from a counter and return the result.
 */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
	return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

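/*
 * Illustrative sketch (not part of this header): 64-bit counters suit
 * statistics that can overflow 32 bits, e.g. a byte count updated from
 * the datapath and read from a control thread. On 32-bit targets
 * rte_atomic64_read() stays tear-free via its cmpset loop. rx_bytes and
 * the two functions are hypothetical.
 *
 * @code
 * static rte_atomic64_t rx_bytes = RTE_ATOMIC64_INIT(0);
 *
 * static void on_rx(uint32_t pkt_len)
 * {
 *	rte_atomic64_add(&rx_bytes, pkt_len);
 * }
 *
 * static int64_t stats_rx_bytes(void)
 * {
 *	return rte_atomic64_read(&rx_bytes);
 * }
 * @endcode
 */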
/**
 * Atomically increment a 64-bit counter by one and test.
 *
 * @return True (non-zero) if the result is 0; false otherwise.
 */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/**
 * Atomically decrement a 64-bit counter by one and test.
 *
 * @return True (non-zero) if the result is 0; false otherwise.
 */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
	return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/**
 * Atomically test and set a 64-bit atomic counter.
 *
 * If the counter is already set (non-zero), return 0 (failure);
 * otherwise set it to 1 and return 1 (success).
 */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
	return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/**
 * Atomically set a 64-bit counter to 0.
 */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
	rte_atomic64_set(v, 0);
}
#endif

#endif /* _RTE_ATOMIC_H_ */