#include <errno.h>

#include <rte_branch_prediction.h>
#include <rte_common.h>
#include <rte_lock_annotations.h>
#include <rte_pause.h>
#include <rte_stdatomic.h>
#define RTE_RWLOCK_WAIT	 0x1	/* Writer is waiting */
#define RTE_RWLOCK_WRITE 0x2	/* Writer has the lock */
#define RTE_RWLOCK_MASK	 (RTE_RWLOCK_WAIT | RTE_RWLOCK_WRITE)
				/* Writer is waiting or has the lock */
#define RTE_RWLOCK_READ	 0x4	/* Reader increment */
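/*
 * Illustration (added commentary, not in the original header): the whole
 * lock state is packed into one 32-bit counter. Bits 0-1 carry the writer
 * state and bits 2-31 count the readers. For example, three readers plus a
 * waiting writer give cnt == 3 * RTE_RWLOCK_READ + RTE_RWLOCK_WAIT == 0xd.
 */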
typedef struct __rte_capability("rwlock") {
	RTE_ATOMIC(int32_t) cnt;
} rte_rwlock_t;
/**
 * A static rwlock initializer.
 */
#define RTE_RWLOCK_INITIALIZER { 0 }

/**
 * Initialize the rwlock to an unlocked state.
 */
static inline void
rte_rwlock_init(rte_rwlock_t *rwl)
{
	rwl->cnt = 0;
}
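/*
 * Illustrative only (not part of the original header): a lock and the value
 * it guards, shared by the usage sketches below. Both names are
 * hypothetical.
 */
static rte_rwlock_t example_lock = RTE_RWLOCK_INITIALIZER;
static int example_value;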
/* Take a read lock, spinning until it is acquired. */
static inline void
rte_rwlock_read_lock(rte_rwlock_t *rwl)
	__rte_acquire_shared_capability(rwl)
	__rte_no_thread_safety_analysis
{
	int32_t x;

	while (1) {
		/* Wait while a writer is present or pending. */
		while (rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed)
		       & RTE_RWLOCK_MASK)
			rte_pause();

		/* Speculatively register as a reader. */
		x = rte_atomic_fetch_add_explicit(&rwl->cnt, RTE_RWLOCK_READ,
				rte_memory_order_acquire) + RTE_RWLOCK_READ;

		/* If no writer raced in, the read lock is held. */
		if (likely(!(x & RTE_RWLOCK_MASK)))
			return;

		/* Lost the race with a writer; back the count out. */
		rte_atomic_fetch_sub_explicit(&rwl->cnt, RTE_RWLOCK_READ,
				rte_memory_order_relaxed);
	}
}
/* Try to take a read lock. Returns 0 on success, -EBUSY otherwise. */
static inline int
rte_rwlock_read_trylock(rte_rwlock_t *rwl)
	__rte_try_acquire_shared_capability(false, rwl)
	__rte_no_thread_safety_analysis
{
	int32_t x;

	x = rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed);

	/* Fail if the write lock is held or a writer is pending. */
	if (x & RTE_RWLOCK_MASK)
		return -EBUSY;

	/* Speculatively register as a reader. */
	x = rte_atomic_fetch_add_explicit(&rwl->cnt, RTE_RWLOCK_READ,
			rte_memory_order_acquire) + RTE_RWLOCK_READ;

	/* Back out if a writer raced in. */
	if (unlikely(x & RTE_RWLOCK_MASK)) {
		rte_atomic_fetch_sub_explicit(&rwl->cnt, RTE_RWLOCK_READ,
				rte_memory_order_release);
		return -EBUSY;
	}

	return 0;
}
/* Release a read lock. */
static inline void
rte_rwlock_read_unlock(rte_rwlock_t *rwl)
	__rte_release_shared_capability(rwl)
	__rte_no_thread_safety_analysis
{
	rte_atomic_fetch_sub_explicit(&rwl->cnt, RTE_RWLOCK_READ, rte_memory_order_release);
}
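/*
 * Usage sketch (illustrative, not part of the original header): a blocking
 * and a non-blocking reader of the hypothetical example_value above.
 */
static inline int
example_read(void)
{
	int v;

	rte_rwlock_read_lock(&example_lock);
	v = example_value;	/* read-side critical section */
	rte_rwlock_read_unlock(&example_lock);

	return v;
}

static inline int
example_try_read(int *v)
{
	if (rte_rwlock_read_trylock(&example_lock) != 0)
		return -EBUSY;	/* writer active or pending; do not spin */
	*v = example_value;
	rte_rwlock_read_unlock(&example_lock);

	return 0;
}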
/* Try to take a write lock. Returns 0 on success, -EBUSY otherwise. */
static inline int
rte_rwlock_write_trylock(rte_rwlock_t *rwl)
	__rte_try_acquire_capability(false, rwl)
	__rte_no_thread_safety_analysis
{
	int32_t x;

	x = rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed);
	/*
	 * Succeed only when there are no readers and no writer (x is 0 or
	 * just the WAIT bit); adding RTE_RWLOCK_WRITE preserves a set WAIT bit.
	 */
	if (x < RTE_RWLOCK_WRITE &&
	    rte_atomic_compare_exchange_weak_explicit(&rwl->cnt, &x, x + RTE_RWLOCK_WRITE,
			rte_memory_order_acquire, rte_memory_order_relaxed))
		return 0;

	return -EBUSY;
}
/* Take a write lock, spinning until it is acquired. */
static inline void
rte_rwlock_write_lock(rte_rwlock_t *rwl)
	__rte_acquire_capability(rwl)
	__rte_no_thread_safety_analysis
{
	int32_t x;

	while (1) {
		x = rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed);

		/* No readers or writers? */
		if (likely(x < RTE_RWLOCK_WRITE)) {
			/* Turn off RTE_RWLOCK_WAIT, turn on RTE_RWLOCK_WRITE. */
			if (rte_atomic_compare_exchange_weak_explicit(&rwl->cnt, &x,
					RTE_RWLOCK_WRITE, rte_memory_order_acquire,
					rte_memory_order_relaxed))
				return;
		}

		/* Turn on the writer-wait bit. */
		if (!(x & RTE_RWLOCK_WAIT))
			rte_atomic_fetch_or_explicit(&rwl->cnt, RTE_RWLOCK_WAIT,
					rte_memory_order_relaxed);

		/* Wait until no readers or writer remain before retrying. */
		while (rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed)
		       > RTE_RWLOCK_WAIT)
			rte_pause();
	}
}
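/*
 * Note (added commentary): setting RTE_RWLOCK_WAIT makes the pending writer
 * visible to rte_rwlock_read_lock(), whose readers spin while any bit of
 * RTE_RWLOCK_MASK is set. New readers are therefore held off, so a steady
 * stream of readers cannot starve a waiting writer.
 */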
/* Release a write lock. */
static inline void
rte_rwlock_write_unlock(rte_rwlock_t *rwl)
	__rte_release_capability(rwl)
	__rte_no_thread_safety_analysis
{
	rte_atomic_fetch_sub_explicit(&rwl->cnt, RTE_RWLOCK_WRITE, rte_memory_order_release);
}
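/*
 * Usage sketch (illustrative, not part of the original header): blocking and
 * opportunistic writers for the hypothetical example_value above.
 */
static inline void
example_write(int v)
{
	rte_rwlock_write_lock(&example_lock);
	example_value = v;	/* exclusive access */
	rte_rwlock_write_unlock(&example_lock);
}

static inline int
example_try_write(int v)
{
	if (rte_rwlock_write_trylock(&example_lock) != 0)
		return -EBUSY;	/* contended; caller may retry later */
	example_value = v;
	rte_rwlock_write_unlock(&example_lock);

	return 0;
}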
/* Test if the write lock is taken. Returns 1 if held, 0 otherwise. */
static inline int
rte_rwlock_write_is_locked(rte_rwlock_t *rwl)
{
	if (rte_atomic_load_explicit(&rwl->cnt, rte_memory_order_relaxed) & RTE_RWLOCK_WRITE)
		return 1;

	return 0;
}
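/*
 * Usage sketch (illustrative, not part of the original header): checking a
 * "caller must hold the write lock" invariant. Assumes <rte_debug.h> is
 * also included so that RTE_ASSERT is available.
 */
static inline void
example_assert_write_locked(rte_rwlock_t *rwl)
{
	RTE_ASSERT(rte_rwlock_write_is_locked(rwl));
}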
/*
 * The _tm variants first try to execute the critical section as a hardware
 * memory transaction and fall back to the plain read/write lock if the
 * transaction aborts or HTM is unavailable; implementations are provided
 * per architecture.
 */
static inline void
rte_rwlock_read_lock_tm(rte_rwlock_t *rwl)
	__rte_acquire_shared_capability(rwl);

static inline void
rte_rwlock_read_unlock_tm(rte_rwlock_t *rwl)
	__rte_release_shared_capability(rwl);

static inline void
rte_rwlock_write_lock_tm(rte_rwlock_t *rwl)
	__rte_acquire_capability(rwl);

static inline void
rte_rwlock_write_unlock_tm(rte_rwlock_t *rwl)
	__rte_release_capability(rwl);
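/*
 * Usage sketch (illustrative, not part of the original header): the _tm
 * calls pair exactly like the regular lock/unlock calls.
 */
static inline int
example_read_tm(void)
{
	int v;

	rte_rwlock_read_lock_tm(&example_lock);
	v = example_value;
	rte_rwlock_read_unlock_tm(&example_lock);

	return v;
}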