#ifndef _RTE_RING_RTS_ELEM_PVT_H_
#define _RTE_RING_RTS_ELEM_PVT_H_

/**
 * @internal Update the tail of an RTS head/tail structure once the
 * calling enqueue/dequeue has finished copying its elements.
 */
static __rte_always_inline void
__rte_ring_rts_update_tail(struct rte_ring_rts_headtail *ht)
{
	union __rte_ring_rts_poscnt h, ot, nt;

	/*
	 * If other enqueues/dequeues that might precede us are still in
	 * progress, don't move the tail position yet, only bump its counter.
	 */
	ot.raw = rte_atomic_load_explicit(&ht->tail.raw, rte_memory_order_acquire);

	do {
		/* on 32-bit systems the head has to be read atomically */
		h.raw = rte_atomic_load_explicit(&ht->head.raw,
			rte_memory_order_relaxed);

		nt.raw = ot.raw;
		if (++nt.val.cnt == h.val.cnt)
			nt.val.pos = h.val.pos;

	} while (rte_atomic_compare_exchange_strong_explicit(&ht->tail.raw,
			(uint64_t *)(uintptr_t)&ot.raw, nt.raw,
			rte_memory_order_release, rte_memory_order_acquire) == 0);
}
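/*
 * Worked example with illustrative values: suppose head = {pos = 10, cnt = 7}
 * and tail = {pos = 6, cnt = 5}, i.e. two reservations are still in flight.
 * The first thread to finish only bumps tail.cnt to 6 and leaves tail.pos
 * untouched; the second bumps it to 7, which now equals head.cnt, so tail.pos
 * jumps straight to 10 and the whole batch becomes visible at once.
 */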
/**
 * @internal Wait while the head/tail distance exceeds the pre-defined maximum.
 */
static __rte_always_inline void
__rte_ring_rts_head_wait(const struct rte_ring_rts_headtail *ht,
	union __rte_ring_rts_poscnt *h)
{
	uint32_t max;

	max = ht->htd_max;

	while (h->val.pos - ht->tail.val.pos > max) {
		rte_pause();
		h->raw = rte_atomic_load_explicit(&ht->head.raw,
			rte_memory_order_acquire);
	}
}
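/*
 * Note: the head/tail-distance limit consulted above (htd_max) is a run-time
 * knob; in the public API it should be reachable through the RTS-specific
 * helpers in rte_ring_rts.h (rte_ring_set_prod_htd_max() and the consumer
 * counterparts), though the exact tuning policy is left to the application.
 */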
/**
 * @internal Move the producer head to reserve room for an enqueue.
 */
static __rte_always_inline uint32_t
__rte_ring_rts_move_prod_head(struct rte_ring *r, uint32_t num,
	enum rte_ring_queue_behavior behavior, uint32_t *old_head,
	uint32_t *free_entries)
{
	uint32_t n;
	union __rte_ring_rts_poscnt nh, oh;

	const uint32_t capacity = r->capacity;

	oh.raw = rte_atomic_load_explicit(&r->rts_prod.head.raw,
		rte_memory_order_acquire);

	do {
		/* restore n, it may have been clipped on the previous pass */
		n = num;

		/* wait for head/tail distance; read prod head before cons tail */
		__rte_ring_rts_head_wait(&r->rts_prod, &oh);

		/*
		 * Unsigned 32-bit subtraction keeps 'free_entries' between
		 * 0 and capacity even when the indices wrap around.
		 */
		*free_entries = capacity + r->cons.tail - oh.val.pos;

		/* clip the request if there is not enough room in the ring */
		if (unlikely(n > *free_entries))
			n = (behavior == RTE_RING_QUEUE_FIXED) ?
					0 : *free_entries;

		if (n == 0)
			break;

		nh.val.pos = oh.val.pos + n;
		nh.val.cnt = oh.val.cnt + 1;

	/*
	 * The CAS(ACQUIRE, ACQUIRE) acts as a hoist barrier, preventing
	 * out-of-order reads of cons tail and of the ring contents.
	 */
	} while (rte_atomic_compare_exchange_strong_explicit(&r->rts_prod.head.raw,
			(uint64_t *)(uintptr_t)&oh.raw, nh.raw,
			rte_memory_order_acquire, rte_memory_order_acquire) == 0);

	*old_head = oh.val.pos;
	return n;
}
/**
 * @internal Move the consumer head to reserve entries for a dequeue.
 */
static __rte_always_inline uint32_t
__rte_ring_rts_move_cons_head(struct rte_ring *r, uint32_t num,
	enum rte_ring_queue_behavior behavior, uint32_t *old_head,
	uint32_t *entries)
{
	uint32_t n;
	union __rte_ring_rts_poscnt nh, oh;

	oh.raw = rte_atomic_load_explicit(&r->rts_cons.head.raw,
		rte_memory_order_acquire);

	/* move cons.head atomically */
	do {
		/* restore n, it may have been clipped on the previous pass */
		n = num;

		/* wait for head/tail distance; read cons head before prod tail */
		__rte_ring_rts_head_wait(&r->rts_cons, &oh);

		/* unsigned 32-bit subtraction keeps 'entries' within 0..size-1 */
		*entries = r->prod.tail - oh.val.pos;

		/* clip the request to the number of available entries */
		if (n > *entries)
			n = (behavior == RTE_RING_QUEUE_FIXED) ? 0 : *entries;

		if (unlikely(n == 0))
			break;

		nh.val.pos = oh.val.pos + n;
		nh.val.cnt = oh.val.cnt + 1;

	/*
	 * The CAS(ACQUIRE, ACQUIRE) acts as a hoist barrier, preventing
	 * out-of-order reads of prod tail and of the ring contents.
	 */
	} while (rte_atomic_compare_exchange_strong_explicit(&r->rts_cons.head.raw,
			(uint64_t *)(uintptr_t)&oh.raw, nh.raw,
			rte_memory_order_acquire, rte_memory_order_acquire) == 0);

	*old_head = oh.val.pos;
	return n;
}
/**
 * @internal Enqueue several objects on the RTS ring.
 */
static __rte_always_inline unsigned int
__rte_ring_do_rts_enqueue_elem(struct rte_ring *r, const void *obj_table,
	uint32_t esize, uint32_t n, enum rte_ring_queue_behavior behavior,
	uint32_t *free_space)
{
	uint32_t free, head;

	n = __rte_ring_rts_move_prod_head(r, n, behavior, &head, &free);

	if (n != 0) {
		__rte_ring_enqueue_elems(r, head, obj_table, esize, n);
		__rte_ring_rts_update_tail(&r->rts_prod);
	}

	if (free_space != NULL)
		*free_space = free - n;
	return n;
}
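/*
 * For context, a minimal sketch of how a public RTS enqueue wrapper drives
 * the helper above (modelled on the bulk wrapper in rte_ring_rts.h; treat
 * the exact name and signature shown here as an assumption, not a definition):
 *
 *	static inline unsigned int
 *	rte_ring_mp_rts_enqueue_bulk_elem(struct rte_ring *r,
 *		const void *obj_table, unsigned int esize, unsigned int n,
 *		unsigned int *free_space)
 *	{
 *		// fixed behavior: enqueue all n elements or none at all
 *		return __rte_ring_do_rts_enqueue_elem(r, obj_table, esize, n,
 *			RTE_RING_QUEUE_FIXED, free_space);
 *	}
 */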
/**
 * @internal Dequeue several objects from the RTS ring.
 */
static __rte_always_inline unsigned int
__rte_ring_do_rts_dequeue_elem(struct rte_ring *r, void *obj_table,
	uint32_t esize, uint32_t n, enum rte_ring_queue_behavior behavior,
	uint32_t *available)
{
	uint32_t entries, head;

	n = __rte_ring_rts_move_cons_head(r, n, behavior, &head, &entries);

	if (n != 0) {
		__rte_ring_dequeue_elems(r, head, obj_table, esize, n);
		__rte_ring_rts_update_tail(&r->rts_cons);
	}

	if (available != NULL)
		*available = entries - n;
	return n;
}
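/*
 * Usage sketch: these helpers are reached through the generic rte_ring API
 * once a ring is created with the RTS sync flags. The fragment below is an
 * illustrative example under that assumption (error handling trimmed), not a
 * prescribed pattern:
 *
 *	#include <rte_ring.h>
 *	#include <rte_ring_elem.h>
 *
 *	struct rte_ring *ring = rte_ring_create_elem("rts_ring",
 *		sizeof(uint64_t), 1024, SOCKET_ID_ANY,
 *		RING_F_MP_RTS_ENQ | RING_F_MC_RTS_DEQ);
 *
 *	uint64_t in[32] = { 0 }, out[32];
 *	unsigned int free_space, avail;
 *
 *	// burst enqueue/dequeue end up in the __rte_ring_do_rts_* helpers
 *	unsigned int nq = rte_ring_enqueue_burst_elem(ring, in,
 *		sizeof(uint64_t), 32, &free_space);
 *	unsigned int nd = rte_ring_dequeue_burst_elem(ring, out,
 *		sizeof(uint64_t), 32, &avail);
 */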
#endif /* _RTE_RING_RTS_ELEM_PVT_H_ */