DPDK 24.11.1
rte_atomic.h
Go to the documentation of this file.
1/* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2010-2014 Intel Corporation
3 */
4
5#ifndef _RTE_ATOMIC_H_
6#define _RTE_ATOMIC_H_
7
15#include <stdint.h>
16
17#include <rte_common.h>
18#include <rte_stdatomic.h>
19
20#ifdef __cplusplus
21extern "C" {
22#endif
23
#ifdef __DOXYGEN__

/** General memory barrier: orders both loads and stores, system wide. */
static inline void rte_mb(void);

/** Write memory barrier: orders stores, system wide. */
static inline void rte_wmb(void);

/** Read memory barrier: orders loads, system wide. */
static inline void rte_rmb(void);

/** SMP memory barrier: orders loads and stores as seen by other lcores. */
static inline void rte_smp_mb(void);

/** SMP write memory barrier: orders stores as seen by other lcores. */
static inline void rte_smp_wmb(void);

/** SMP read memory barrier: orders loads as seen by other lcores. */
static inline void rte_smp_rmb(void);

/** I/O memory barrier: orders loads and stores to I/O memory. */
static inline void rte_io_mb(void);

/** I/O write memory barrier: orders stores to I/O memory. */
static inline void rte_io_wmb(void);

/** I/O read memory barrier: orders loads from I/O memory. */
static inline void rte_io_rmb(void);

#endif /* __DOXYGEN__ */
147
/**
 * Compiler barrier.
 *
 * Prevents the compiler from reordering memory accesses across this point;
 * it emits no CPU fence instruction.
 */
#ifdef RTE_TOOLCHAIN_MSVC
#define rte_compiler_barrier() _ReadWriteBarrier()
#else
/* __asm__/__volatile__ spellings keep this valid under strict ISO C modes
 * (-std=c11), where the plain `asm` keyword is unavailable. */
#define rte_compiler_barrier() do {		\
	__asm__ __volatile__ ("" : : : "memory");	\
} while (0)
#endif
161
165static inline void rte_atomic_thread_fence(rte_memory_order memorder);
166
/*------------------------- 16 bit atomic operations -------------------------*/
168
169#ifndef RTE_TOOLCHAIN_MSVC
170
/**
 * Atomic compare and set (16-bit).
 *
 * If *dst equals exp, atomically write src to *dst.
 *
 * @param dst
 *   The destination location.
 * @param exp
 *   The expected value.
 * @param src
 *   The value to write on success.
 * @return
 *   Non-zero on success; 0 if *dst did not equal exp.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
197
213static inline uint16_t
214rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);
215
216#ifdef RTE_FORCE_INTRINSICS
217static inline uint16_t
218rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
219{
220 return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
221}
222#endif
223
/**
 * The atomic counter structure (16-bit).
 */
typedef struct {
	volatile int16_t cnt; /**< An internal counter value. */
} rte_atomic16_t;

/**
 * Static initializer for an atomic counter.
 *
 * @param val
 *   The initial counter value.
 */
#define RTE_ATOMIC16_INIT(val) { (val) }

/**
 * Initialize an atomic counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic16_init(rte_atomic16_t *v)
{
	v->cnt = 0;
}

/**
 * Read the counter value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int16_t
rte_atomic16_read(const rte_atomic16_t *v)
{
	return v->cnt;
}
261
270static inline void
271rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
272{
273 v->cnt = new_value;
274}
275
284static inline void
286{
287 rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
288 rte_memory_order_seq_cst);
289}
290
299static inline void
301{
302 rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
303 rte_memory_order_seq_cst);
304}
305
312static inline void
314
315#ifdef RTE_FORCE_INTRINSICS
316static inline void
318{
319 rte_atomic16_add(v, 1);
320}
321#endif
322
329static inline void
331
332#ifdef RTE_FORCE_INTRINSICS
333static inline void
335{
336 rte_atomic16_sub(v, 1);
337}
338#endif
339
353static inline int16_t
355{
356 return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
357 rte_memory_order_seq_cst) + inc;
358}
359
374static inline int16_t
376{
377 return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
378 rte_memory_order_seq_cst) - dec;
379}
380
393
394#ifdef RTE_FORCE_INTRINSICS
395static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
396{
397 return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
398 rte_memory_order_seq_cst) + 1 == 0;
399}
400#endif
401
414
415#ifdef RTE_FORCE_INTRINSICS
416static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
417{
418 return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
419 rte_memory_order_seq_cst) - 1 == 0;
420}
421#endif
422
435
436#ifdef RTE_FORCE_INTRINSICS
437static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
438{
439 return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
440}
441#endif
442
449static inline void rte_atomic16_clear(rte_atomic16_t *v)
450{
451 v->cnt = 0;
452}
453
/*------------------------- 32 bit atomic operations -------------------------*/
455
/**
 * Atomic compare and set (32-bit).
 *
 * If *dst equals exp, atomically write src to *dst.
 *
 * @param dst
 *   The destination location.
 * @param exp
 *   The expected value.
 * @param src
 *   The value to write on success.
 * @return
 *   Non-zero on success; 0 if *dst did not equal exp.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
482
498static inline uint32_t
499rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);
500
501#ifdef RTE_FORCE_INTRINSICS
502static inline uint32_t
503rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
504{
505 return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
506}
507#endif
508
/**
 * The atomic counter structure (32-bit).
 */
typedef struct {
	volatile int32_t cnt; /**< An internal counter value. */
} rte_atomic32_t;

/**
 * Static initializer for an atomic counter.
 *
 * @param val
 *   The initial counter value.
 */
#define RTE_ATOMIC32_INIT(val) { (val) }

/**
 * Initialize an atomic counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic32_init(rte_atomic32_t *v)
{
	v->cnt = 0;
}

/**
 * Read the counter value.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int32_t
rte_atomic32_read(const rte_atomic32_t *v)
{
	return v->cnt;
}
546
555static inline void
556rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
557{
558 v->cnt = new_value;
559}
560
569static inline void
571{
572 rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
573 rte_memory_order_seq_cst);
574}
575
584static inline void
586{
587 rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
588 rte_memory_order_seq_cst);
589}
590
597static inline void
599
600#ifdef RTE_FORCE_INTRINSICS
601static inline void
603{
604 rte_atomic32_add(v, 1);
605}
606#endif
607
614static inline void
616
617#ifdef RTE_FORCE_INTRINSICS
618static inline void
620{
621 rte_atomic32_sub(v,1);
622}
623#endif
624
638static inline int32_t
640{
641 return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
642 rte_memory_order_seq_cst) + inc;
643}
644
659static inline int32_t
661{
662 return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
663 rte_memory_order_seq_cst) - dec;
664}
665
678
679#ifdef RTE_FORCE_INTRINSICS
680static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
681{
682 return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
683 rte_memory_order_seq_cst) + 1 == 0;
684}
685#endif
686
699
700#ifdef RTE_FORCE_INTRINSICS
701static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
702{
703 return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
704 rte_memory_order_seq_cst) - 1 == 0;
705}
706#endif
707
720
721#ifdef RTE_FORCE_INTRINSICS
722static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
723{
724 return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
725}
726#endif
727
734static inline void rte_atomic32_clear(rte_atomic32_t *v)
735{
736 v->cnt = 0;
737}
738
/*------------------------- 64 bit atomic operations -------------------------*/
740
/**
 * Atomic compare and set (64-bit).
 *
 * If *dst equals exp, atomically write src to *dst.
 *
 * @param dst
 *   The destination location.
 * @param exp
 *   The expected value.
 * @param src
 *   The value to write on success.
 * @return
 *   Non-zero on success; 0 if *dst did not equal exp.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);

#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	return __sync_bool_compare_and_swap(dst, exp, src);
}
#endif
766
782static inline uint64_t
783rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);
784
785#ifdef RTE_FORCE_INTRINSICS
786static inline uint64_t
787rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
788{
789 return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
790}
791#endif
792
/**
 * The atomic counter structure (64-bit).
 */
typedef struct {
	volatile int64_t cnt; /**< An internal counter value. */
} rte_atomic64_t;

/**
 * Static initializer for an atomic counter.
 *
 * @param val
 *   The initial counter value.
 */
#define RTE_ATOMIC64_INIT(val) { (val) }

/**
 * Initialize an atomic counter to 0.
 *
 * @param v
 *   A pointer to the atomic counter.
 */
static inline void
rte_atomic64_init(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_init(rte_atomic64_t *v)
{
#ifdef __LP64__
	v->cnt = 0;
#else
	/* 32-bit targets cannot store 64 bits atomically: loop on CAS. */
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, 0);
	}
#endif
}
#endif

/**
 * Read the counter value atomically.
 *
 * @param v
 *   A pointer to the atomic counter.
 * @return
 *   The value of the counter.
 */
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
rte_atomic64_read(rte_atomic64_t *v)
{
#ifdef __LP64__
	return v->cnt;
#else
	int success = 0;
	uint64_t tmp;

	while (success == 0) {
		tmp = v->cnt;
		/* replace the value by itself to obtain an atomic snapshot */
		success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
		                              tmp, tmp);
	}
	return tmp;
#endif
}
#endif
864
873static inline void
874rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);
875
876#ifdef RTE_FORCE_INTRINSICS
877static inline void
878rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
879{
880#ifdef __LP64__
881 v->cnt = new_value;
882#else
883 int success = 0;
884 uint64_t tmp;
885
886 while (success == 0) {
887 tmp = v->cnt;
888 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
889 tmp, new_value);
890 }
891#endif
892}
893#endif
894
903static inline void
905
906#ifdef RTE_FORCE_INTRINSICS
907static inline void
908rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
909{
910 rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
911 rte_memory_order_seq_cst);
912}
913#endif
914
923static inline void
925
926#ifdef RTE_FORCE_INTRINSICS
927static inline void
928rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
929{
930 rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
931 rte_memory_order_seq_cst);
932}
933#endif
934
941static inline void
943
944#ifdef RTE_FORCE_INTRINSICS
945static inline void
947{
948 rte_atomic64_add(v, 1);
949}
950#endif
951
958static inline void
960
961#ifdef RTE_FORCE_INTRINSICS
962static inline void
964{
965 rte_atomic64_sub(v, 1);
966}
967#endif
968
982static inline int64_t
984
985#ifdef RTE_FORCE_INTRINSICS
986static inline int64_t
988{
989 return rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
990 rte_memory_order_seq_cst) + inc;
991}
992#endif
993
1007static inline int64_t
1009
1010#ifdef RTE_FORCE_INTRINSICS
1011static inline int64_t
1013{
1014 return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
1015 rte_memory_order_seq_cst) - dec;
1016}
1017#endif
1018
1031
1032#ifdef RTE_FORCE_INTRINSICS
1033static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
1034{
1035 return rte_atomic64_add_return(v, 1) == 0;
1036}
1037#endif
1038
1051
1052#ifdef RTE_FORCE_INTRINSICS
1053static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
1054{
1055 return rte_atomic64_sub_return(v, 1) == 0;
1056}
1057#endif
1058
1071
1072#ifdef RTE_FORCE_INTRINSICS
1073static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
1074{
1075 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
1076}
1077#endif
1078
1085static inline void rte_atomic64_clear(rte_atomic64_t *v);
1086
1087#ifdef RTE_FORCE_INTRINSICS
1088static inline void rte_atomic64_clear(rte_atomic64_t *v)
1089{
1090 rte_atomic64_set(v, 0);
1091}
1092#endif
1093
1094#endif
1095
/*------------------------ 128 bit atomic operations -------------------------*/

/**
 * 128-bit integer structure, aligned to a 16-byte boundary.
 */
typedef struct __rte_aligned(16) {
	union {
		uint64_t val[2]; /**< The value as two 64-bit words. */
#ifdef RTE_ARCH_64
#ifndef RTE_TOOLCHAIN_MSVC
		/* Native 128-bit view, only on 64-bit non-MSVC toolchains. */
		__extension__ __int128 int128;
#endif
#endif
	};
} rte_int128_t;
1111
#ifdef __DOXYGEN__

/**
 * 128-bit atomic compare and exchange.
 *
 * @param dst
 *   The destination into which src is written on success.
 * @param exp
 *   The expected value; updated with the actual value on failure.
 * @param src
 *   The value written to dst on success.
 * @param weak
 *   Non-zero permits spurious failure (weak CAS); 0 requires strong CAS.
 * @param success
 *   Memory ordering used when the exchange succeeds.
 * @param failure
 *   Memory ordering used when the exchange fails.
 * @return
 *   Non-zero on success; 0 otherwise.
 */
static inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
	rte_int128_t *exp,
	const rte_int128_t *src,
	unsigned int weak,
	int success,
	int failure);

#endif /* __DOXYGEN__ */
1162
1163#ifdef __cplusplus
1164}
1165#endif
1166
1167#endif /* _RTE_ATOMIC_H_ */
static int rte_atomic16_dec_and_test(rte_atomic16_t *v)
static void rte_atomic16_dec(rte_atomic16_t *v)
static int rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
static int rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
static void rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
static int rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
static int rte_atomic128_cmp_exchange(rte_int128_t *dst, rte_int128_t *exp, const rte_int128_t *src, unsigned int weak, int success, int failure)
static int rte_atomic64_test_and_set(rte_atomic64_t *v)
static void rte_io_rmb(void)
static void rte_rmb(void)
static void rte_atomic32_clear(rte_atomic32_t *v)
Definition: rte_atomic.h:734
static int64_t rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
static void rte_io_mb(void)
static void rte_io_wmb(void)
static int rte_atomic32_inc_and_test(rte_atomic32_t *v)
static int rte_atomic64_dec_and_test(rte_atomic64_t *v)
static void rte_atomic64_clear(rte_atomic64_t *v)
static void rte_smp_mb(void)
static int16_t rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
Definition: rte_atomic.h:375
static void rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
Definition: rte_atomic.h:271
static void rte_atomic16_clear(rte_atomic16_t *v)
Definition: rte_atomic.h:449
static void rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
Definition: rte_atomic.h:285
static uint32_t rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
static void rte_mb(void)
static void rte_atomic32_inc(rte_atomic32_t *v)
static void rte_smp_wmb(void)
static void rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
Definition: rte_atomic.h:300
static int rte_atomic32_test_and_set(rte_atomic32_t *v)
static void rte_atomic32_dec(rte_atomic32_t *v)
static uint16_t rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
static uint64_t rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
static void rte_atomic_thread_fence(rte_memory_order memorder)
static void rte_atomic64_dec(rte_atomic64_t *v)
static int32_t rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
Definition: rte_atomic.h:660
static void rte_atomic16_init(rte_atomic16_t *v)
Definition: rte_atomic.h:243
static void rte_smp_rmb(void)
static void rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
struct __rte_aligned(16)
Definition: rte_atomic.h:1101
static int rte_atomic16_test_and_set(rte_atomic16_t *v)
static int32_t rte_atomic32_read(const rte_atomic32_t *v)
Definition: rte_atomic.h:542
static void rte_wmb(void)
static void rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
Definition: rte_atomic.h:570
static void rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
Definition: rte_atomic.h:556
static void rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
static void rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
Definition: rte_atomic.h:585
static void rte_atomic64_inc(rte_atomic64_t *v)
static int16_t rte_atomic16_read(const rte_atomic16_t *v)
Definition: rte_atomic.h:257
static int64_t rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
static void rte_atomic64_init(rte_atomic64_t *v)
static int16_t rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
Definition: rte_atomic.h:354
static void rte_atomic32_init(rte_atomic32_t *v)
Definition: rte_atomic.h:528
static void rte_atomic16_inc(rte_atomic16_t *v)
static int64_t rte_atomic64_read(rte_atomic64_t *v)
static int rte_atomic64_inc_and_test(rte_atomic64_t *v)
static int rte_atomic32_dec_and_test(rte_atomic32_t *v)
static int rte_atomic16_inc_and_test(rte_atomic16_t *v)
static int32_t rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
Definition: rte_atomic.h:639
volatile int16_t cnt
Definition: rte_atomic.h:228
volatile int32_t cnt
Definition: rte_atomic.h:513
volatile int64_t cnt
Definition: rte_atomic.h:797