/* rte_mcslock_lock(): initialize our queue node before publishing it. */
__atomic_store_n(&me->locked, 1, __ATOMIC_RELAXED);
__atomic_store_n(&me->next, NULL, __ATOMIC_RELAXED);
/* Atomically swap ourselves in as the new tail of the queue. */
rte_mcslock_t *prev = __atomic_exchange_n(msl, me, __ATOMIC_ACQ_REL);
if (likely(prev == NULL)) {
	/* Queue was empty: the exchange alone acquired the lock. */
	return;
}
/* Link behind the old tail; release publishes our initialized node. */
__atomic_store_n(&prev->next, me, __ATOMIC_RELEASE);
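After linking itself behind the predecessor, the waiter spins on its own locked flag until the previous holder clears it in rte_mcslock_unlock(). A minimal sketch of that final step of the slow path, using the rte_wait_until_equal_32() helper listed at the end of this section:

/* Spin on our own node (a local cache line) until the predecessor
 * hands the lock over by storing 0 to me->locked.
 */
rte_wait_until_equal_32((volatile uint32_t *)&me->locked, 0,
		__ATOMIC_ACQUIRE);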
/* rte_mcslock_unlock(): is anyone queued behind us? */
if (likely(__atomic_load_n(&me->next, __ATOMIC_ACQUIRE) == NULL)) {
	/* Looks like the last node: try to swing the tail back to NULL. */
	rte_mcslock_t *save_me = me;

	if (likely(__atomic_compare_exchange_n(msl, &save_me, NULL, 0,
			__ATOMIC_RELEASE, __ATOMIC_RELAXED)))
		return;

	/* CAS failed: a new waiter is enqueueing; wait for its link. */
	uintptr_t *next = (uintptr_t *)&me->next;
	RTE_WAIT_UNTIL_MASKED(next, UINTPTR_MAX, !=, 0, __ATOMIC_ACQUIRE);
}
/* Hand the lock to the next waiter by clearing its locked flag. */
__atomic_store_n(&me->next->locked, 0, __ATOMIC_RELEASE);
/* rte_mcslock_trylock(): succeeds only when the queue is empty. */
__atomic_store_n(&me->next, NULL, __ATOMIC_RELAXED);
rte_mcslock_t *expected = NULL;
return __atomic_compare_exchange_n(msl, &expected, me, 0,
		__ATOMIC_ACQ_REL, __ATOMIC_RELAXED);
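A brief usage sketch for the trylock path; the lock variable p_ml is illustrative, not part of the header:

rte_mcslock_t *p_ml = NULL;	/* illustrative lock: NULL tail = unlocked */
rte_mcslock_t me;		/* caller-supplied queue node */

if (rte_mcslock_trylock(&p_ml, &me)) {
	/* ... short critical section ... */
	rte_mcslock_unlock(&p_ml, &me);
}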
/* rte_mcslock_is_locked(): the lock is held while the tail is non-NULL. */
return (__atomic_load_n(&msl, __ATOMIC_RELAXED) != NULL);
The public API of rte_mcslock.h, plus the wait helper it relies on:

struct rte_mcslock (typedef'd as rte_mcslock_t)
static void rte_mcslock_lock(rte_mcslock_t **msl, rte_mcslock_t *me)
static void rte_mcslock_unlock(rte_mcslock_t **msl, rte_mcslock_t *me)
static int rte_mcslock_trylock(rte_mcslock_t **msl, rte_mcslock_t *me)
static int rte_mcslock_is_locked(rte_mcslock_t *msl)
static __rte_always_inline void rte_wait_until_equal_32(volatile uint32_t *addr, uint32_t expected, int memorder)
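Putting it together, a minimal blocking usage sketch (names are illustrative; each locker supplies its own node, which must stay valid until the matching unlock):

#include <rte_mcslock.h>

static rte_mcslock_t *p_ml;	/* the lock: tail of the waiter queue */

static void
do_work(void)
{
	rte_mcslock_t me;	/* per-acquisition node; the stack is fine */

	rte_mcslock_lock(&p_ml, &me);
	/* ... critical section ... */
	rte_mcslock_unlock(&p_ml, &me);
}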