#include <errno.h>

#include <rte_branch_prediction.h>
#include <rte_common.h>
#include <rte_lock_annotations.h>
#include <rte_pause.h>
#include <rte_stdatomic.h>

/*
 * The lock word is a single 32-bit counter: bit 0 marks a waiting writer,
 * bit 1 a writer holding the lock, and each reader adds RTE_RWLOCK_READ.
 */
#define RTE_RWLOCK_WAIT	 0x1	/* Writer is waiting */
#define RTE_RWLOCK_WRITE 0x2	/* Writer has the lock */
#define RTE_RWLOCK_MASK  (RTE_RWLOCK_WAIT | RTE_RWLOCK_WRITE)
				/* Writer is waiting or has the lock */
#define RTE_RWLOCK_READ	 0x4	/* Reader increment */

/**
 * The rte_rwlock_t type.
 */
typedef struct __rte_lockable {
	RTE_ATOMIC(int32_t) cnt;
} rte_rwlock_t;

/**
 * A static rwlock initializer.
 */
#define RTE_RWLOCK_INITIALIZER { 0 }

/**
 * Initialize the rwlock to an unlocked state.
 *
 * @param rwl
 *   A pointer to the rwlock structure.
 */
static inline void
rte_rwlock_init(rte_rwlock_t *rwl)
{
	rwl->cnt = 0;
}
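/*
 * Illustrative sketch, not part of this header's API: the two ways to obtain
 * an unlocked rwlock. The names example_init, stats_lock and dyn_lock are
 * hypothetical.
 */
static inline void
example_init(void)
{
	/* Static initialization, usable at file scope. */
	static rte_rwlock_t stats_lock = RTE_RWLOCK_INITIALIZER;
	/* Runtime initialization, e.g. for dynamically allocated memory. */
	rte_rwlock_t dyn_lock;

	rte_rwlock_init(&dyn_lock);
	RTE_SET_USED(stats_lock);
	RTE_SET_USED(dyn_lock);
}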
/**
 * Take a read lock. Loop until the lock is held.
 *
 * @param rwl
 *   A pointer to a rwlock structure.
 */
static inline void
rte_rwlock_read_lock(rte_rwlock_t *rwl)
	__rte_shared_lock_function(rwl)
	__rte_no_thread_safety_analysis
{
	int32_t x;

	while (1) {
		/* Wait while a writer is present or pending */
		while (rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed)
		       & RTE_RWLOCK_MASK)
			rte_pause();

		/* Try to get the read lock */
		x = rte_atomic_fetch_add_explicit(&rwl->cnt, RTE_RWLOCK_READ,
						  rte_memory_order_acquire) + RTE_RWLOCK_READ;

		/* If no writer, then acquisition was successful */
		if (likely(!(x & RTE_RWLOCK_MASK)))
			return;

		/* Lost the race with a writer, back out the reader count */
		rte_atomic_fetch_sub_explicit(&rwl->cnt, RTE_RWLOCK_READ,
					      rte_memory_order_relaxed);
	}
}
/**
 * Try to take a read lock.
 *
 * @param rwl
 *   A pointer to a rwlock structure.
 * @return
 *   0 if the lock was acquired, -EBUSY otherwise.
 */
static inline int
rte_rwlock_read_trylock(rte_rwlock_t *rwl)
	__rte_shared_trylock_function(0, rwl)
	__rte_no_thread_safety_analysis
{
	int32_t x;

	x = rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed);

	/* Fail if the write lock is held or a writer is pending */
	if (x & RTE_RWLOCK_MASK)
		return -EBUSY;

	/* Try to get the read lock */
	x = rte_atomic_fetch_add_explicit(&rwl->cnt, RTE_RWLOCK_READ,
					  rte_memory_order_acquire) + RTE_RWLOCK_READ;

	/* Back out if a writer raced in */
	if (unlikely(x & RTE_RWLOCK_MASK)) {
		rte_atomic_fetch_sub_explicit(&rwl->cnt, RTE_RWLOCK_READ,
					      rte_memory_order_release);
		return -EBUSY;
	}

	return 0;
}
/**
 * Release a read lock.
 *
 * @param rwl
 *   A pointer to the rwlock structure.
 */
static inline void
rte_rwlock_read_unlock(rte_rwlock_t *rwl)
	__rte_unlock_function(rwl)
	__rte_no_thread_safety_analysis
{
	rte_atomic_fetch_sub_explicit(&rwl->cnt, RTE_RWLOCK_READ,
				      rte_memory_order_release);
}
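/*
 * Illustrative usage sketch, not part of the DPDK API: a reader-side
 * critical section in blocking and non-blocking form. The lock, the
 * counters array and the function names are hypothetical.
 */
static inline uint64_t
example_read_counter(rte_rwlock_t *lock, const uint64_t *counters,
		     unsigned int idx)
{
	uint64_t v;

	rte_rwlock_read_lock(lock);	/* blocks while a writer is active or pending */
	v = counters[idx];
	rte_rwlock_read_unlock(lock);

	return v;
}

static inline int
example_try_read_counter(rte_rwlock_t *lock, const uint64_t *counters,
			 unsigned int idx, uint64_t *out)
{
	if (rte_rwlock_read_trylock(lock) != 0)
		return -EBUSY;	/* writer held or pending; caller may retry */

	*out = counters[idx];
	rte_rwlock_read_unlock(lock);

	return 0;
}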
/**
 * Try to take a write lock.
 *
 * @param rwl
 *   A pointer to a rwlock structure.
 * @return
 *   0 if the lock was acquired, -EBUSY otherwise.
 */
static inline int
rte_rwlock_write_trylock(rte_rwlock_t *rwl)
	__rte_exclusive_trylock_function(0, rwl)
	__rte_no_thread_safety_analysis
{
	int32_t x;

	x = rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed);

	/*
	 * Succeed only if there is no reader and no active writer
	 * (a pending-writer WAIT bit may remain set).
	 */
	if (x < RTE_RWLOCK_WRITE &&
	    rte_atomic_compare_exchange_weak_explicit(&rwl->cnt, &x, x + RTE_RWLOCK_WRITE,
						      rte_memory_order_acquire,
						      rte_memory_order_relaxed))
		return 0;

	return -EBUSY;
}
/**
 * Take a write lock. Loop until the lock is held.
 *
 * @param rwl
 *   A pointer to a rwlock structure.
 */
static inline void
rte_rwlock_write_lock(rte_rwlock_t *rwl)
	__rte_exclusive_lock_function(rwl)
	__rte_no_thread_safety_analysis
{
	int32_t x;

	while (1) {
		x = rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed);

		/* No readers or active writer? */
		if (likely(x < RTE_RWLOCK_WRITE)) {
			/* Turn off RTE_RWLOCK_WAIT, turn on RTE_RWLOCK_WRITE */
			if (rte_atomic_compare_exchange_weak_explicit(&rwl->cnt, &x,
			    RTE_RWLOCK_WRITE, rte_memory_order_acquire,
			    rte_memory_order_relaxed))
				return;
		}

		/* Turn on the writer-wait bit so new readers back off */
		if (!(x & RTE_RWLOCK_WAIT))
			rte_atomic_fetch_or_explicit(&rwl->cnt, RTE_RWLOCK_WAIT,
						     rte_memory_order_relaxed);

		/* Wait until the readers drain before trying again */
		while (rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed)
		       > RTE_RWLOCK_WAIT)
			rte_pause();
	}
}
/**
 * Release a write lock.
 *
 * @param rwl
 *   A pointer to a rwlock structure.
 */
static inline void
rte_rwlock_write_unlock(rte_rwlock_t *rwl)
	__rte_unlock_function(rwl)
	__rte_no_thread_safety_analysis
{
	rte_atomic_fetch_sub_explicit(&rwl->cnt, RTE_RWLOCK_WRITE,
				      rte_memory_order_release);
}
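/*
 * Illustrative usage sketch, not part of the DPDK API: a writer-side update
 * in blocking and non-blocking form. The counters array and the function
 * names are hypothetical.
 */
static inline void
example_bump_counter(rte_rwlock_t *lock, uint64_t *counters, unsigned int idx)
{
	rte_rwlock_write_lock(lock);	/* waits out readers and other writers */
	counters[idx]++;
	rte_rwlock_write_unlock(lock);
}

static inline int
example_try_bump_counter(rte_rwlock_t *lock, uint64_t *counters,
			 unsigned int idx)
{
	if (rte_rwlock_write_trylock(lock) != 0)
		return -EBUSY;	/* lock busy; caller decides whether to retry */

	counters[idx]++;
	rte_rwlock_write_unlock(lock);

	return 0;
}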
/**
 * Test if the write lock is taken.
 *
 * @param rwl
 *   A pointer to a rwlock structure.
 * @return
 *   1 if the write lock is currently taken; 0 otherwise.
 */
static inline int
rte_rwlock_write_is_locked(rte_rwlock_t *rwl)
{
	if (rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed) &
	    RTE_RWLOCK_WRITE)
		return 1;

	return 0;
}
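/*
 * Illustrative sketch, not part of the DPDK API: a debug check that a helper
 * is only entered with the write lock held. example_locked_helper is a
 * hypothetical name; RTE_ASSERT comes from <rte_debug.h> and compiles away
 * unless RTE_ENABLE_ASSERT is defined.
 */
static inline void
example_locked_helper(rte_rwlock_t *lock)
{
	/* Requires #include <rte_debug.h>. */
	RTE_ASSERT(rte_rwlock_write_is_locked(lock));
	/* ... mutate the protected state here ... */
}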
/**
 * Try to execute the critical section in a hardware memory transaction;
 * if it fails or is not available, take a read lock.
 */
static inline void
rte_rwlock_read_lock_tm(rte_rwlock_t *rwl)
	__rte_shared_lock_function(rwl);

/**
 * Commit the hardware memory transaction or release the read lock if the
 * lock is used as a fall-back.
 */
static inline void
rte_rwlock_read_unlock_tm(rte_rwlock_t *rwl)
	__rte_unlock_function(rwl);

/**
 * Try to execute the critical section in a hardware memory transaction;
 * if it fails or is not available, take a write lock.
 */
static inline void
rte_rwlock_write_lock_tm(rte_rwlock_t *rwl)
	__rte_exclusive_lock_function(rwl);

/**
 * Commit the hardware memory transaction or release the write lock if the
 * lock is used as a fall-back.
 */
static inline void
rte_rwlock_write_unlock_tm(rte_rwlock_t *rwl)
	__rte_unlock_function(rwl);
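/*
 * Illustrative sketch, not part of the DPDK API: the _tm variants follow the
 * same pairing discipline as the plain lock/unlock calls; whether a hardware
 * transaction or the lock itself is used is decided per architecture.
 * shared_value and the function name are hypothetical.
 */
static inline uint64_t
example_read_tm(rte_rwlock_t *lock, const uint64_t *shared_value)
{
	uint64_t v;

	rte_rwlock_read_lock_tm(lock);	/* HTM if available, else read lock */
	v = *shared_value;
	rte_rwlock_read_unlock_tm(lock); /* commit or unlock, matching above */

	return v;
}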