#ifndef _RTE_RING_RTS_C11_MEM_H_
#define _RTE_RING_RTS_C11_MEM_H_
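/*
 * Internal helper functions for the Relaxed Tail Sync (RTS) ring mode,
 * using the C11 memory model (__atomic builtins).
 *
 * For reference, the helpers below manipulate a 64-bit position/counter
 * pair. A sketch of its layout, assuming the definition in rte_ring_core.h
 * (shown for illustration only, not redefined by this header; the exact
 * layout may differ across DPDK versions):
 *
 *	union __rte_ring_rts_poscnt {
 *		uint64_t raw;         // cnt + pos read/written as one 8B atomic
 *		struct {
 *			uint32_t cnt; // head/tail update reference counter
 *			uint32_t pos; // head/tail position
 *		} val;
 *	};
 */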
/** @internal Update the tail value once all preceding updates have completed. */
static __rte_always_inline void
__rte_ring_rts_update_tail(struct rte_ring_rts_headtail *ht)
{
	union __rte_ring_rts_poscnt h, ot, nt;

	ot.raw = __atomic_load_n(&ht->tail.raw, __ATOMIC_ACQUIRE);

	do {
		/* on 32-bit systems the head has to be read atomically */
		h.raw = __atomic_load_n(&ht->head.raw, __ATOMIC_RELAXED);

		nt.raw = ot.raw;
		/* advance the tail position only when ours is the last
		 * outstanding update (tail counter catches up with head).
		 */
		if (++nt.val.cnt == h.val.cnt)
			nt.val.pos = h.val.pos;
	} while (__atomic_compare_exchange_n(&ht->tail.raw, &ot.raw, nt.raw,
			0, __ATOMIC_RELEASE, __ATOMIC_ACQUIRE) == 0);
}
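/*
 * Worked example (illustrative): with two updates in flight the head
 * counter is 2 ahead of the tail counter. The first thread to finish
 * increments tail.cnt, which still differs from head.cnt, so tail.pos
 * is left unchanged; the second thread's increment makes the counters
 * equal, so the same CAS also publishes head.pos as the new tail.pos.
 */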
/** @internal Spin until the head/tail distance drops to at most htd_max. */
static __rte_always_inline void
__rte_ring_rts_head_wait(const struct rte_ring_rts_headtail *ht,
	union __rte_ring_rts_poscnt *h)
{
	while (h->val.pos - ht->tail.val.pos > ht->htd_max) {
		rte_pause();
		h->raw = __atomic_load_n(&ht->head.raw, __ATOMIC_ACQUIRE);
	}
}
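/*
 * Note on the unsigned arithmetic above (illustrative): the 32-bit
 * positions are free-running and may wrap, but their difference is still
 * correct modulo 2^32. E.g. with tail.pos == 0xFFFFFFFE and head pos == 4,
 * 4 - 0xFFFFFFFE == 6, i.e. the head is 6 slots ahead of the tail.
 */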
/** @internal Move the producer head, reserving up to 'num' slots for enqueue. */
static __rte_always_inline uint32_t
__rte_ring_rts_move_prod_head(struct rte_ring *r, uint32_t num,
	enum rte_ring_queue_behavior behavior, uint32_t *old_head,
	uint32_t *free_entries)
{
	uint32_t n;
	union __rte_ring_rts_poscnt nh, oh;
	const uint32_t capacity = r->capacity;

	oh.raw = __atomic_load_n(&r->rts_prod.head.raw, __ATOMIC_ACQUIRE);

	do {
		/* reset n to the initial burst count */
		n = num;
		/* wait for head/tail distance; reads prod head *before* cons tail */
		__rte_ring_rts_head_wait(&r->rts_prod, &oh);
		/* unsigned math keeps free_entries in [0, capacity] even on wrap */
		*free_entries = capacity + r->cons.tail - oh.val.pos;

		if (unlikely(n > *free_entries))
			n = (behavior == RTE_RING_QUEUE_FIXED) ?
					0 : *free_entries;
		if (n == 0)
			break;

		nh.val.pos = oh.val.pos + n;
		nh.val.cnt = oh.val.cnt + 1;
	/* CAS(ACQUIRE, ACQUIRE) is a hoist barrier against OOO cons.tail reads */
	} while (__atomic_compare_exchange_n(&r->rts_prod.head.raw,
			&oh.raw, nh.raw,
			0, __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE) == 0);

	*old_head = oh.val.pos;
	return n;
}
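/*
 * Enqueue usage sketch (illustrative only; the real callers live in
 * rte_ring_rts.h and their exact shape may differ by DPDK version):
 *
 *	uint32_t head, free;
 *
 *	n = __rte_ring_rts_move_prod_head(r, n, behavior, &head, &free);
 *	if (n != 0) {
 *		// copy n elements into the ring starting at 'head'
 *		__rte_ring_enqueue_elems(r, head, obj_table, esize, n);
 *		// make the new entries visible to consumers
 *		__rte_ring_rts_update_tail(&r->rts_prod);
 *	}
 */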
/** @internal Move the consumer head, reserving up to 'num' entries for dequeue. */
static __rte_always_inline uint32_t
__rte_ring_rts_move_cons_head(struct rte_ring *r, uint32_t num,
	enum rte_ring_queue_behavior behavior, uint32_t *old_head,
	uint32_t *entries)
{
	uint32_t n;
	union __rte_ring_rts_poscnt nh, oh;

	oh.raw = __atomic_load_n(&r->rts_cons.head.raw, __ATOMIC_ACQUIRE);

	do {
		/* restore n as it may change every loop */
		n = num;
		/* wait for head/tail distance; reads cons head *before* prod tail */
		__rte_ring_rts_head_wait(&r->rts_cons, &oh);
		/* unsigned math keeps entries in [0, size - 1] even on wrap */
		*entries = r->prod.tail - oh.val.pos;

		if (n > *entries)
			n = (behavior == RTE_RING_QUEUE_FIXED) ? 0 : *entries;
		if (unlikely(n == 0))
			break;

		nh.val.pos = oh.val.pos + n;
		nh.val.cnt = oh.val.cnt + 1;
	/* CAS(ACQUIRE, ACQUIRE) is a hoist barrier against OOO prod.tail reads */
	} while (__atomic_compare_exchange_n(&r->rts_cons.head.raw,
			&oh.raw, nh.raw,
			0, __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE) == 0);

	*old_head = oh.val.pos;
	return n;
}
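/*
 * Dequeue usage sketch (illustrative, mirrors the enqueue path above):
 *
 *	n = __rte_ring_rts_move_cons_head(r, n, behavior, &head, &avail);
 *	if (n != 0) {
 *		// copy n elements out of the ring starting at 'head'
 *		__rte_ring_dequeue_elems(r, head, obj_table, esize, n);
 *		__rte_ring_rts_update_tail(&r->rts_cons);
 *	}
 */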
#endif /* _RTE_RING_RTS_C11_MEM_H_ */