DPDK 25.03.0-rc0
rte_stdatomic.h
/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2023 Microsoft Corporation
 */

#ifndef RTE_STDATOMIC_H
#define RTE_STDATOMIC_H

#include <assert.h>

#ifdef RTE_ENABLE_STDATOMIC
#ifndef _MSC_VER
#ifdef __STDC_NO_ATOMICS__
#error enable_stdatomic=true but atomics not supported by toolchain
#endif
#endif

#include <stdatomic.h>

/* RTE_ATOMIC(type) is provided for use as a type specifier
 * permitting designation of an rte atomic type.
 */
#define RTE_ATOMIC(type) _Atomic(type)

/* __rte_atomic is provided for type qualification permitting
 * designation of an rte atomic qualified type-name.
 */
#define __rte_atomic _Atomic

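/* Illustrative usage sketch (hypothetical names, not part of this header):
 *
 *	static RTE_ATOMIC(uint64_t) pkt_count;		// via the type specifier
 *	static uint32_t __rte_atomic in_flight;		// via the type qualifier
 *
 * __rte_atomic is also useful for qualifying pointed-to types, e.g. in a
 * cast such as (uint64_t __rte_atomic *)addr.
 */
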
/* The memory order is an enumerated type in C11. */
typedef memory_order rte_memory_order;

#define rte_memory_order_relaxed memory_order_relaxed
#ifdef __ATOMIC_RELAXED
static_assert(rte_memory_order_relaxed == __ATOMIC_RELAXED,
	"rte_memory_order_relaxed == __ATOMIC_RELAXED");
#endif

#define rte_memory_order_consume memory_order_consume
#ifdef __ATOMIC_CONSUME
static_assert(rte_memory_order_consume == __ATOMIC_CONSUME,
	"rte_memory_order_consume == __ATOMIC_CONSUME");
#endif

#define rte_memory_order_acquire memory_order_acquire
#ifdef __ATOMIC_ACQUIRE
static_assert(rte_memory_order_acquire == __ATOMIC_ACQUIRE,
	"rte_memory_order_acquire == __ATOMIC_ACQUIRE");
#endif

#define rte_memory_order_release memory_order_release
#ifdef __ATOMIC_RELEASE
static_assert(rte_memory_order_release == __ATOMIC_RELEASE,
	"rte_memory_order_release == __ATOMIC_RELEASE");
#endif

#define rte_memory_order_acq_rel memory_order_acq_rel
#ifdef __ATOMIC_ACQ_REL
static_assert(rte_memory_order_acq_rel == __ATOMIC_ACQ_REL,
	"rte_memory_order_acq_rel == __ATOMIC_ACQ_REL");
#endif

#define rte_memory_order_seq_cst memory_order_seq_cst
#ifdef __ATOMIC_SEQ_CST
static_assert(rte_memory_order_seq_cst == __ATOMIC_SEQ_CST,
	"rte_memory_order_seq_cst == __ATOMIC_SEQ_CST");
#endif

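/* Note: the static_asserts above check that each C11 enumerator has the same
 * numeric value as the corresponding __ATOMIC_* built-in constant whenever the
 * compiler defines one; presumably this keeps rte_memory_order values safe to
 * pass to compiler built-ins (e.g. in per-arch code) regardless of whether
 * RTE_ENABLE_STDATOMIC is set.
 */
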
#define rte_atomic_load_explicit(ptr, memorder) \
	atomic_load_explicit(ptr, memorder)

#define rte_atomic_store_explicit(ptr, val, memorder) \
	atomic_store_explicit(ptr, val, memorder)

#define rte_atomic_exchange_explicit(ptr, val, memorder) \
	atomic_exchange_explicit(ptr, val, memorder)

#define rte_atomic_compare_exchange_strong_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder) \
	atomic_compare_exchange_strong_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder)

#define rte_atomic_compare_exchange_weak_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder) \
	atomic_compare_exchange_weak_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder)

#define rte_atomic_fetch_add_explicit(ptr, val, memorder) \
	atomic_fetch_add_explicit(ptr, val, memorder)

#define rte_atomic_fetch_sub_explicit(ptr, val, memorder) \
	atomic_fetch_sub_explicit(ptr, val, memorder)

#define rte_atomic_fetch_and_explicit(ptr, val, memorder) \
	atomic_fetch_and_explicit(ptr, val, memorder)

#define rte_atomic_fetch_xor_explicit(ptr, val, memorder) \
	atomic_fetch_xor_explicit(ptr, val, memorder)

#define rte_atomic_fetch_or_explicit(ptr, val, memorder) \
	atomic_fetch_or_explicit(ptr, val, memorder)

#define rte_atomic_fetch_nand_explicit(ptr, val, memorder) \
	atomic_fetch_nand_explicit(ptr, val, memorder)

#define rte_atomic_flag_test_and_set_explicit(ptr, memorder) \
	atomic_flag_test_and_set_explicit(ptr, memorder)

#define rte_atomic_flag_clear_explicit(ptr, memorder) \
	atomic_flag_clear_explicit(ptr, memorder)

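/* Illustrative usage sketch of the wrappers above (hypothetical names, not
 * part of this header); each macro takes a pointer to an RTE_ATOMIC()
 * object plus an rte_memory_order value, mirroring the C11 *_explicit
 * generic functions:
 *
 *	static RTE_ATOMIC(uint32_t) refcnt;
 *
 *	rte_atomic_fetch_add_explicit(&refcnt, 1, rte_memory_order_relaxed);
 *
 *	uint32_t exp = rte_atomic_load_explicit(&refcnt,
 *	    rte_memory_order_relaxed);
 *	while (!rte_atomic_compare_exchange_weak_explicit(&refcnt, &exp,
 *	    exp + 1, rte_memory_order_acq_rel, rte_memory_order_relaxed))
 *		;	// on failure 'exp' is refreshed; weak CAS may fail spuriously
 */
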
/* We provide an internal macro here to allow conditional expansion
 * in the body of the per-arch rte_atomic_thread_fence inline functions.
 */
#define __rte_atomic_thread_fence(memorder) \
	atomic_thread_fence(memorder)

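/* Sketch of the intended expansion point (hypothetical; actual per-arch
 * headers may differ): a generic rte_atomic.h could implement the public
 * fence on top of this internal macro as
 *
 *	static inline void
 *	rte_atomic_thread_fence(rte_memory_order memorder)
 *	{
 *		__rte_atomic_thread_fence(memorder);
 *	}
 *
 * while an architecture with a cheaper full-barrier sequence may special-case
 * rte_memory_order_seq_cst before falling back to the macro.
 */
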
#else /* !RTE_ENABLE_STDATOMIC */

#define RTE_ATOMIC(type) type

#define __rte_atomic

/* The memory order is an integer type in GCC built-ins,
 * not an enumerated type like in C11.
 */
typedef int rte_memory_order;

#define rte_memory_order_relaxed __ATOMIC_RELAXED
#define rte_memory_order_consume __ATOMIC_CONSUME
#define rte_memory_order_acquire __ATOMIC_ACQUIRE
#define rte_memory_order_release __ATOMIC_RELEASE
#define rte_memory_order_acq_rel __ATOMIC_ACQ_REL
#define rte_memory_order_seq_cst __ATOMIC_SEQ_CST

#define rte_atomic_load_explicit(ptr, memorder) \
	__atomic_load_n(ptr, memorder)

#define rte_atomic_store_explicit(ptr, val, memorder) \
	__atomic_store_n(ptr, val, memorder)

#define rte_atomic_exchange_explicit(ptr, val, memorder) \
	__atomic_exchange_n(ptr, val, memorder)

#define rte_atomic_compare_exchange_strong_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder) \
	__atomic_compare_exchange_n(ptr, expected, desired, 0, \
		succ_memorder, fail_memorder)

#define rte_atomic_compare_exchange_weak_explicit(ptr, expected, desired, \
		succ_memorder, fail_memorder) \
	__atomic_compare_exchange_n(ptr, expected, desired, 1, \
		succ_memorder, fail_memorder)

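/* Note: the fourth argument of the __atomic_compare_exchange_n built-in
 * selects weak (1) versus strong (0) compare-exchange semantics, which is the
 * only difference between the two wrappers above; their calling convention is
 * identical to the stdatomic branch.
 */
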
#define rte_atomic_fetch_add_explicit(ptr, val, memorder) \
	__atomic_fetch_add(ptr, val, memorder)

#define rte_atomic_fetch_sub_explicit(ptr, val, memorder) \
	__atomic_fetch_sub(ptr, val, memorder)

#define rte_atomic_fetch_and_explicit(ptr, val, memorder) \
	__atomic_fetch_and(ptr, val, memorder)

#define rte_atomic_fetch_xor_explicit(ptr, val, memorder) \
	__atomic_fetch_xor(ptr, val, memorder)

#define rte_atomic_fetch_or_explicit(ptr, val, memorder) \
	__atomic_fetch_or(ptr, val, memorder)

#define rte_atomic_fetch_nand_explicit(ptr, val, memorder) \
	__atomic_fetch_nand(ptr, val, memorder)

#define rte_atomic_flag_test_and_set_explicit(ptr, memorder) \
	__atomic_test_and_set(ptr, memorder)

#define rte_atomic_flag_clear_explicit(ptr, memorder) \
	__atomic_clear(ptr, memorder)

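/* Note: in this branch the flag wrappers map to __atomic_test_and_set and
 * __atomic_clear, which operate on a plain bool or char object rather than on
 * the C11 atomic_flag type used by the stdatomic branch above.
 */
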
/* We provide an internal macro here to allow conditional expansion
 * in the body of the per-arch rte_atomic_thread_fence inline functions.
 */
#define __rte_atomic_thread_fence(memorder) \
	__atomic_thread_fence(memorder)

#endif

#endif /* RTE_STDATOMIC_H */