#ifndef _RTE_MCSLOCK_H_
#define _RTE_MCSLOCK_H_

/**
 * @file
 *
 * RTE MCS lock: a scalable queued spinlock. Each waiter spins on its
 * own queue node, so contention does not bounce a shared cache line.
 */

#include <rte_common.h>
#include <rte_branch_prediction.h>
#include <rte_pause.h>

/**
 * The MCS lock is a pointer to the tail of a queue of waiters;
 * NULL means the lock is free.
 */
typedef struct rte_mcslock {
	struct rte_mcslock *next;
	int locked; /* 1 if this node is waiting for the lock, 0 otherwise */
} rte_mcslock_t;
/**
 * Take the MCS lock, queueing on the caller-supplied node.
 *
 * @param msl
 *   A pointer to the pointer of the MCS lock (the queue tail).
 * @param me
 *   A pointer to a queue node owned by the caller. It must remain
 *   valid until the matching rte_mcslock_unlock().
 */
__rte_experimental
static inline void
rte_mcslock_lock(rte_mcslock_t **msl, rte_mcslock_t *me)
{
	rte_mcslock_t *prev;

	/* Init me node */
	__atomic_store_n(&me->locked, 1, __ATOMIC_RELAXED);
	__atomic_store_n(&me->next, NULL, __ATOMIC_RELAXED);

	/* If the queue was empty, the exchange alone acquires the lock,
	 * hence acquire semantics. The stores above must complete before
	 * the node becomes visible to other threads, hence release
	 * semantics as well.
	 */
	prev = __atomic_exchange_n(msl, me, __ATOMIC_ACQ_REL);
	if (likely(prev == NULL)) {
		/* Queue was empty, no further action required,
		 * proceed with lock taken.
		 */
		return;
	}
	/* Link this node behind the previous tail. */
	__atomic_store_n(&prev->next, me, __ATOMIC_RELAXED);

	/* The while-load of me->locked below must not be reordered above
	 * the store to prev->next, otherwise the handoff can be missed
	 * and both threads deadlock. Need a store-load barrier.
	 */
	__atomic_thread_fence(__ATOMIC_ACQ_REL);

	/* Spin on our own node until the previous holder releases. */
	while (__atomic_load_n(&me->locked, __ATOMIC_ACQUIRE))
		rte_pause();
}
/**
 * Release the MCS lock.
 *
 * @param msl
 *   A pointer to the pointer of the MCS lock (the queue tail).
 * @param me
 *   The node that was passed to the matching rte_mcslock_lock().
 */
__rte_experimental
static inline void
rte_mcslock_unlock(rte_mcslock_t **msl, rte_mcslock_t *me)
{
	/* Check if there are more nodes in the queue. */
	if (likely(__atomic_load_n(&me->next, __ATOMIC_RELAXED) == NULL)) {
		/* No successor visible: try to free the lock by swinging
		 * the tail from this node back to NULL.
		 */
		rte_mcslock_t *save_me = me;

		if (likely(__atomic_compare_exchange_n(msl, &save_me, NULL, 0,
				__ATOMIC_RELEASE, __ATOMIC_RELAXED)))
			return;

		/* The CAS failed: another thread is in the middle of
		 * enqueueing itself. The loads in the loop below must not
		 * be speculated ahead of the CAS. Need a load barrier.
		 */
		__atomic_thread_fence(__ATOMIC_ACQUIRE);

		/* Wait until the new waiter has linked itself behind us. */
		while (__atomic_load_n(&me->next, __ATOMIC_RELAXED) == NULL)
			rte_pause();
	}

	/* Pass the lock to the successor. */
	__atomic_store_n(&me->next->locked, 0, __ATOMIC_RELEASE);
}
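/**
 * Usage sketch (illustrative, not part of the original header): the
 * caller owns the queue node and must pass the same node to lock and
 * unlock. The names p_ml, ml_me and mcs_critical_section below are
 * hypothetical.
 *
 * @code
 * static rte_mcslock_t *p_ml; // queue tail; NULL means unlocked
 *
 * static void
 * mcs_critical_section(void)
 * {
 *	rte_mcslock_t ml_me; // this thread's queue node
 *
 *	rte_mcslock_lock(&p_ml, &ml_me);
 *	// ... critical section: ml_me must stay valid until unlock ...
 *	rte_mcslock_unlock(&p_ml, &ml_me);
 * }
 * @endcode
 */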
/**
 * Try to take the lock.
 *
 * @return
 *   1 if the lock was taken, 0 otherwise.
 */
__rte_experimental
static inline int
rte_mcslock_trylock(rte_mcslock_t **msl, rte_mcslock_t *me)
{
	/* Init me node */
	__atomic_store_n(&me->next, NULL, __ATOMIC_RELAXED);

	/* The lock can be taken only when the queue is empty. */
	rte_mcslock_t *expected = NULL;

	/* Acquire semantics take the lock; release semantics make the
	 * store to me->next above visible before the node is published.
	 */
	return __atomic_compare_exchange_n(msl, &expected, me, 0,
			__ATOMIC_ACQ_REL, __ATOMIC_RELAXED);
}
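/**
 * Trylock sketch (illustrative, not part of the original header),
 * reusing the hypothetical p_ml tail pointer from the sketch above:
 *
 * @code
 * rte_mcslock_t ml_me;
 *
 * if (rte_mcslock_trylock(&p_ml, &ml_me)) {
 *	// lock taken without spinning
 *	rte_mcslock_unlock(&p_ml, &ml_me);
 * }
 * @endcode
 */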
/**
 * Test if the lock is taken.
 *
 * @param msl
 *   The current value of the MCS lock (the queue tail pointer).
 * @return
 *   1 if the lock is taken, 0 otherwise.
 */
__rte_experimental
static inline int
rte_mcslock_is_locked(rte_mcslock_t *msl)
{
	return (__atomic_load_n(&msl, __ATOMIC_RELAXED) != NULL);
}
#endif /* _RTE_MCSLOCK_H_ */