#ifndef _RTE_MCSLOCK_H_
#define _RTE_MCSLOCK_H_

#include <rte_stdatomic.h>

typedef struct rte_mcslock {
	RTE_ATOMIC(struct rte_mcslock *) next;
	RTE_ATOMIC(int) locked; /* 1 if the queue is locked, 0 otherwise */
} rte_mcslock_t;
	/* rte_mcslock_lock(): initialize the caller's queue node first. */
	rte_atomic_store_explicit(&me->locked, 1, rte_memory_order_relaxed);
	rte_atomic_store_explicit(&me->next, NULL, rte_memory_order_relaxed);
	/* Swap this node in as the queue tail; acq_rel both takes the lock on an
	 * empty queue and publishes the stores above before the node is visible. */
	prev = rte_atomic_exchange_explicit(msl, me, rte_memory_order_acq_rel);
	if (likely(prev == NULL)) {
		/* Queue was empty, the lock is taken. */
		return;
	}
	/* Otherwise link behind the previous tail and wait for the handoff. */
	rte_atomic_store_explicit(&prev->next, me, rte_memory_order_release);
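/*
 * Usage sketch (not from the header): the lock variable is a pointer to the
 * current queue tail and starts as NULL; every thread passes its own node to
 * rte_mcslock_lock()/rte_mcslock_unlock(). The names p_ml, ml_me, worker()
 * and counter below are illustrative only.
 */
#include <rte_mcslock.h>

static RTE_ATOMIC(rte_mcslock_t *) p_ml;	/* NULL when the lock is free */
static int counter;				/* hypothetical shared state */

static void
worker(void)
{
	rte_mcslock_t ml_me;	/* this thread's own queue node */

	rte_mcslock_lock(&p_ml, &ml_me);
	counter++;		/* critical section */
	rte_mcslock_unlock(&p_ml, &ml_me);
}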
	/* rte_mcslock_unlock(): check whether a waiter is queued behind us. */
	if (likely(rte_atomic_load_explicit(&me->next, rte_memory_order_relaxed) == NULL)) {
		/* Looks like the last node; try to release by clearing the tail. */
		rte_mcslock_t *save_me = rte_atomic_load_explicit(&me, rte_memory_order_relaxed);

		if (likely(rte_atomic_compare_exchange_strong_explicit(msl, &save_me, NULL,
				rte_memory_order_release, rte_memory_order_relaxed)))
			return;

		/* A waiter queued itself concurrently; wait until it sets me->next. */
		RTE_ATOMIC(uintptr_t) *next;
		next = (__rte_atomic uintptr_t *)&me->next;
		RTE_WAIT_UNTIL_MASKED(next, UINTPTR_MAX, !=, 0, rte_memory_order_relaxed);
	}
	/* Hand the lock to the next waiter. */
	rte_atomic_store_explicit(&me->next->locked, 0, rte_memory_order_release);
	/* rte_mcslock_trylock(): initialize the node, then take the lock only
	 * if the queue is empty. Returns 1 on success, 0 otherwise. */
	rte_atomic_store_explicit(&me->next, NULL, rte_memory_order_relaxed);
	rte_mcslock_t *expected = NULL;
	return rte_atomic_compare_exchange_strong_explicit(msl, &expected, me,
			rte_memory_order_acq_rel, rte_memory_order_relaxed);
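/*
 * Trylock sketch (illustrative, reusing p_ml and counter from the sketch
 * above): rte_mcslock_trylock() succeeds only when the queue is empty, so
 * the caller needs a fallback when it returns 0.
 */
static void
worker_try(void)
{
	rte_mcslock_t ml_me;

	if (rte_mcslock_trylock(&p_ml, &ml_me)) {
		counter++;	/* lock taken without queueing */
		rte_mcslock_unlock(&p_ml, &ml_me);
	}
	/* else: queue was not empty; back off or fall back to rte_mcslock_lock() */
}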
	/* rte_mcslock_is_locked(): the lock is held while the queue tail is non-NULL. */
	return (rte_atomic_load_explicit(&msl, rte_memory_order_relaxed) != NULL);
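/*
 * rte_mcslock_is_locked() takes the lock pointer by value and is mainly
 * useful for diagnostics; a sketch (reusing p_ml and ml_me from above,
 * assuming <assert.h>): while a thread holds the lock, the tail pointer is
 * non-NULL, so the check below must pass.
 */
	rte_mcslock_lock(&p_ml, &ml_me);
	assert(rte_mcslock_is_locked(p_ml));
	rte_mcslock_unlock(&p_ml, &ml_me);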
static __rte_always_inline void rte_wait_until_equal_32(volatile uint32_t *addr, uint32_t expected, rte_memory_order memorder)
static int rte_mcslock_trylock(RTE_ATOMIC(rte_mcslock_t *) *msl, rte_mcslock_t *me)
typedef struct rte_mcslock rte_mcslock_t
static void rte_mcslock_lock(RTE_ATOMIC(rte_mcslock_t *) *msl, rte_mcslock_t *me)
static void rte_atomic_thread_fence(rte_memory_order memorder)
static void rte_mcslock_unlock(RTE_ATOMIC(rte_mcslock_t *) *msl, RTE_ATOMIC(rte_mcslock_t *) me)
static int rte_mcslock_is_locked(RTE_ATOMIC(rte_mcslock_t *) msl)