DPDK  18.02.2
rte_atomic.h
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2010-2014 Intel Corporation
 */

#ifndef _RTE_ATOMIC_H_
#define _RTE_ATOMIC_H_

/**
 * @file
 * Atomic Operations
 *
 * This file defines a generic API for atomic operations.
 */

#include <stdint.h>
#include <rte_common.h>

#ifdef __DOXYGEN__

/**
 * General memory barrier.
 *
 * Guarantees that the LOAD and STORE operations generated before the
 * barrier occur before the LOAD and STORE operations generated after.
 * This barrier is architecture dependent.
 */
static inline void rte_mb(void);

/**
 * Write memory barrier.
 *
 * Guarantees that the STORE operations generated before the barrier
 * occur before the STORE operations generated after.
 */
static inline void rte_wmb(void);

/**
 * Read memory barrier.
 *
 * Guarantees that the LOAD operations generated before the barrier
 * occur before the LOAD operations generated after.
 */
static inline void rte_rmb(void);

/**
 * General memory barrier between lcores: same guarantee as rte_mb(),
 * but only with respect to other lcores (SMP).
 */
static inline void rte_smp_mb(void);

/** Write memory barrier between lcores. */
static inline void rte_smp_wmb(void);

/** Read memory barrier between lcores. */
static inline void rte_smp_rmb(void);

/**
 * General memory barrier for I/O accesses: orders LOADs and STOREs to
 * I/O memory with respect to one another.
 */
static inline void rte_io_mb(void);

/** Write memory barrier for I/O accesses. */
static inline void rte_io_wmb(void);

/** Read memory barrier for I/O accesses. */
static inline void rte_io_rmb(void);

/**
 * Write memory barrier for coherent memory shared between lcores
 * and I/O devices.
 */
static inline void rte_cio_wmb(void);

/**
 * Read memory barrier for coherent memory shared between lcores
 * and I/O devices.
 */
static inline void rte_cio_rmb(void);

#endif /* __DOXYGEN__ */

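/**
 * Usage sketch (illustrative, not part of the original header): a minimal
 * single-producer/single-consumer handoff built on the SMP barriers above.
 * The names ring_entry, publish and consume are hypothetical.
 *
 * @code
 * struct ring_entry {
 *         uint32_t payload;
 *         volatile uint32_t ready;
 * };
 *
 * static void
 * publish(struct ring_entry *e, uint32_t value)
 * {
 *         e->payload = value;
 *         rte_smp_wmb();          // make payload visible before the flag
 *         e->ready = 1;
 * }
 *
 * static uint32_t
 * consume(struct ring_entry *e)
 * {
 *         while (e->ready == 0)
 *                 ;
 *         rte_smp_rmb();          // do not read payload before the flag
 *         return e->payload;
 * }
 * @endcode
 */
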
/**
 * Compiler barrier: prevents the compiler from moving loads and stores
 * across this point, but generates no CPU fence instruction.
 */
#define rte_compiler_barrier() do {             \
        asm volatile ("" : : : "memory");       \
} while(0)

/*------------------------- 16 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 16-bit words)
 *
 * @return Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

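/**
 * Usage sketch (illustrative, not part of the original header): the usual
 * read/modify/compare-and-set retry loop, here computing an atomic maximum
 * on top of rte_atomic16_cmpset(). The helper name atomic16_max is
 * hypothetical.
 *
 * @code
 * static inline void
 * atomic16_max(volatile uint16_t *dst, uint16_t val)
 * {
 *         uint16_t old;
 *
 *         do {
 *                 old = *dst;
 *                 if (old >= val)
 *                         return;  // current value already larger
 *         } while (rte_atomic16_cmpset(dst, old, val) == 0);
 * }
 * @endcode
 */
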
/**
 * The atomic counter structure.
 */
typedef struct {
        volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/** Initialize an atomic counter to zero. */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
        v->cnt = 0;
}

/** Atomically read the counter value. */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
        return v->cnt;
}

/** Atomically set the counter to new_value. */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
        v->cnt = new_value;
}

/** Atomically add inc to the counter. */
static inline void
rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}

/** Atomically subtract dec from the counter. */
static inline void
rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}

/** Atomically increment the counter by one. */
static inline void
rte_atomic16_inc(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_inc(rte_atomic16_t *v)
{
        rte_atomic16_add(v, 1);
}
#endif

/** Atomically decrement the counter by one. */
static inline void
rte_atomic16_dec(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic16_dec(rte_atomic16_t *v)
{
        rte_atomic16_sub(v, 1);
}
#endif

/** Atomically add inc to the counter and return the new value. */
static inline int16_t
rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}

/** Atomically subtract dec from the counter and return the new value. */
static inline int16_t
rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}

/** Atomically increment the counter; return non-zero if the result is 0. */
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
{
        return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/** Atomically decrement the counter; return non-zero if the result is 0. */
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
{
        return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/** Set the counter to 1 if it is 0; return non-zero on success. */
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
{
        return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
}
#endif

/** Set the counter to 0. */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
        v->cnt = 0;
}

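/**
 * Usage sketch (illustrative, not part of the original header): a simple
 * busy-wait flag built from rte_atomic16_test_and_set() and
 * rte_atomic16_clear(). For a real lock, DPDK provides rte_spinlock.h.
 * The helper name with_flag_held is hypothetical.
 *
 * @code
 * static rte_atomic16_t flag = RTE_ATOMIC16_INIT(0);
 *
 * static void
 * with_flag_held(void (*critical)(void))
 * {
 *         while (rte_atomic16_test_and_set(&flag) == 0)
 *                 ;                       // spin until we moved 0 -> 1
 *         critical();
 *         rte_atomic16_clear(&flag);      // release the flag
 * }
 * @endcode
 */
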
/*------------------------- 32 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 32-bit words)
 *
 * @return Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
        volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/** Initialize an atomic counter to zero. */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
        v->cnt = 0;
}

/** Atomically read the counter value. */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
        return v->cnt;
}

/** Atomically set the counter to new_value. */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
        v->cnt = new_value;
}

/** Atomically add inc to the counter. */
static inline void
rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}

/** Atomically subtract dec from the counter. */
static inline void
rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}

/** Atomically increment the counter by one. */
static inline void
rte_atomic32_inc(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_inc(rte_atomic32_t *v)
{
        rte_atomic32_add(v, 1);
}
#endif

/** Atomically decrement the counter by one. */
static inline void
rte_atomic32_dec(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic32_dec(rte_atomic32_t *v)
{
        rte_atomic32_sub(v, 1);
}
#endif

/** Atomically add inc to the counter and return the new value. */
static inline int32_t
rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}

/** Atomically subtract dec from the counter and return the new value. */
static inline int32_t
rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}

/** Atomically increment the counter; return non-zero if the result is 0. */
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
{
        return __sync_add_and_fetch(&v->cnt, 1) == 0;
}
#endif

/** Atomically decrement the counter; return non-zero if the result is 0. */
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
{
        return __sync_sub_and_fetch(&v->cnt, 1) == 0;
}
#endif

/** Set the counter to 1 if it is 0; return non-zero on success. */
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
{
        return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
}
#endif

/** Set the counter to 0. */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
        v->cnt = 0;
}

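/**
 * Usage sketch (illustrative, not part of the original header): a reference
 * count where the last holder frees the object, built on rte_atomic32_inc()
 * and rte_atomic32_dec_and_test(). The struct and function names are
 * hypothetical.
 *
 * @code
 * struct refobj {
 *         rte_atomic32_t refcnt;
 *         // ... payload ...
 * };
 *
 * static void
 * refobj_get(struct refobj *o)
 * {
 *         rte_atomic32_inc(&o->refcnt);
 * }
 *
 * static void
 * refobj_put(struct refobj *o, void (*destroy)(struct refobj *))
 * {
 *         if (rte_atomic32_dec_and_test(&o->refcnt))
 *                 destroy(o);     // counter hit zero: last reference
 * }
 * @endcode
 */
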
/*------------------------- 64 bit atomic operations -------------------------*/

/**
 * Atomic compare and set.
 *
 * (atomic) equivalent to:
 *   if (*dst == exp)
 *     *dst = src (all 64-bit words)
 *
 * @return Non-zero on success; 0 on failure.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
        return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif

/**
 * The atomic counter structure.
 */
typedef struct {
        volatile int64_t cnt; /**< An internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/** Initialize the atomic counter to zero. */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
        v->cnt = 0;
#else
        /* On 32-bit targets a plain 64-bit store is not atomic, so retry
         * a compare-and-set until the store succeeds. */
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, 0);
        }
#endif
}
#endif

/** Atomically read the 64-bit counter value. */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
        return v->cnt;
#else
        /* On 32-bit targets, read atomically by swapping the value with
         * itself via compare-and-set. */
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                /* replace the value by itself */
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, tmp);
        }
        return tmp;
#endif
}
#endif

/** Atomically set the 64-bit counter to new_value. */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
{
#ifdef __LP64__
        v->cnt = new_value;
#else
        /* 32-bit fallback: retry a compare-and-set until the store lands. */
        int success = 0;
        uint64_t tmp;

        while (success == 0) {
                tmp = v->cnt;
                success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
                                              tmp, new_value);
        }
#endif
}
#endif

/** Atomically add inc to the 64-bit counter. */
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
        __sync_fetch_and_add(&v->cnt, inc);
}
#endif

/** Atomically subtract dec from the 64-bit counter. */
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
        __sync_fetch_and_sub(&v->cnt, dec);
}
#endif

/** Atomically increment the 64-bit counter by one. */
static inline void
rte_atomic64_inc(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_inc(rte_atomic64_t *v)
{
        rte_atomic64_add(v, 1);
}
#endif

/** Atomically decrement the 64-bit counter by one. */
static inline void
rte_atomic64_dec(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_dec(rte_atomic64_t *v)
{
        rte_atomic64_sub(v, 1);
}
#endif

/** Atomically add inc to the 64-bit counter and return the new value. */
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
{
        return __sync_add_and_fetch(&v->cnt, inc);
}
#endif

/** Atomically subtract dec from the 64-bit counter and return the new value. */
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
{
        return __sync_sub_and_fetch(&v->cnt, dec);
}
#endif

/** Atomically increment the counter; return non-zero if the result is 0. */
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_add_return(v, 1) == 0;
}
#endif

/** Atomically decrement the counter; return non-zero if the result is 0. */
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
{
        return rte_atomic64_sub_return(v, 1) == 0;
}
#endif

/** Set the counter to 1 if it is 0; return non-zero on success. */
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
{
        return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
}
#endif

/** Set the counter to 0. */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
        rte_atomic64_set(v, 0);
}
#endif

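/**
 * Usage sketch (illustrative, not part of the original header): a 64-bit
 * byte counter updated from several lcores and read elsewhere; on 32-bit
 * targets the cmpset-based fallbacks above keep both sides atomic. The
 * names total_bytes, account_packet and stats_snapshot are hypothetical.
 *
 * @code
 * static rte_atomic64_t total_bytes = RTE_ATOMIC64_INIT(0);
 *
 * static void
 * account_packet(uint32_t pkt_len)
 * {
 *         rte_atomic64_add(&total_bytes, pkt_len);
 * }
 *
 * static int64_t
 * stats_snapshot(void)
 * {
 *         return rte_atomic64_read(&total_bytes);
 * }
 * @endcode
 */
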
#endif /* _RTE_ATOMIC_H_ */