10#ifndef _RTE_RING_RTS_ELEM_PVT_H_
11#define _RTE_RING_RTS_ELEM_PVT_H_
/*
 * Advance the RTS (Relaxed Tail Sync) tail to cover completed head updates.
 * NOTE(review): this chunk is missing extracted lines (return type, the
 * `do {` opener, and intermediate statements) — comments describe only what
 * the visible code shows.
 *
 * CAS loop: reload tail (acquire) and head (relaxed) each iteration; bump
 * the tail's update counter, and when the incremented counter catches up
 * with the head's counter, every outstanding head move has finished, so the
 * tail position can jump straight to the head position.
 */
25__rte_ring_rts_update_tail(
struct rte_ring_rts_headtail *ht)
27 union __rte_ring_rts_poscnt h, ot, nt;
45 ot.raw = rte_atomic_load_explicit(&ht->tail.raw, rte_memory_order_acquire);
49 h.raw = rte_atomic_load_explicit(&ht->head.raw, rte_memory_order_relaxed);
/* all pending head updates are accounted for: sync tail pos to head pos */
52 if (++nt.val.cnt == h.val.cnt)
53 nt.val.pos = h.val.pos;
60 }
/* retry until the 64-bit {pos,cnt} tail word is swapped in atomically;
 * on failure ot.raw is refreshed with the current tail value */
while (rte_atomic_compare_exchange_strong_explicit(&ht->tail.raw,
61 (uint64_t *)(uintptr_t)&ot.raw, nt.raw,
62 rte_memory_order_release, rte_memory_order_acquire) == 0);
/*
 * Spin until the distance between head and tail positions drops to at most
 * ht->htd_max (the configured cap on in-flight head updates).
 * NOTE(review): extraction dropped lines here (second parameter list entry,
 * the loop's pause/backoff body, and the return) — presumably the loop
 * body re-reads head after a pause; confirm against the full file.
 * Returns (per the visible callers) the head value read with `memorder`.
 */
70__rte_ring_rts_head_wait(const struct rte_ring_rts_headtail *ht,
73 union __rte_ring_rts_poscnt h;
74 uint32_t max = ht->htd_max;
77 h.raw = rte_atomic_load_explicit(&ht->head.raw, memorder);
/* unsigned wrap-around subtraction gives the head/tail distance */
79 while (h.val.pos - ht->tail.val.pos > max) {
81 h.raw = rte_atomic_load_explicit(&ht->head.raw, memorder);
/*
 * Reserve `num` slots for a multi-producer RTS enqueue by advancing the
 * producer head with a CAS loop.
 * NOTE(review): this chunk is missing extracted lines (the `behavior`
 * handling that clamps n, the `do {` opener, and the return of n) —
 * comments cover only the visible statements.
 *
 * Writes the claimed start position to *old_head and the pre-claim free
 * count to *free_entries.
 */
91__rte_ring_rts_move_prod_head(
struct rte_ring *r, uint32_t num,
93 uint32_t *free_entries)
95 uint32_t n, cons_tail;
96 union __rte_ring_rts_poscnt nh, oh;
98 const uint32_t capacity = r->
capacity;
/* wait until in-flight producer head updates are under htd_max */
115 oh = __rte_ring_rts_head_wait(&r->rts_prod, rte_memory_order_acquire);
/* acquire pairs with the consumer's tail release: entries read below
 * were fully consumed before cons.tail advanced */
122 cons_tail = rte_atomic_load_explicit(&r->cons.tail, rte_memory_order_acquire);
/* free = capacity + cons_tail - head; unsigned wrap-around arithmetic */
130 *free_entries = capacity + cons_tail - oh.val.pos;
/* new head: position advanced by n, update counter incremented so the
 * tail-update logic can tell when all claims have completed */
140 nh.val.pos = oh.val.pos + n;
141 nh.val.cnt = oh.val.cnt + 1;
151 }
/* retry the 64-bit head CAS until we win; oh.raw is refreshed on failure */
while (rte_atomic_compare_exchange_strong_explicit(&r->rts_prod.head.raw,
152 (uint64_t *)(uintptr_t)&oh.raw, nh.raw,
153 rte_memory_order_release, rte_memory_order_relaxed) == 0);
155 *old_head = oh.val.pos;
/*
 * Reserve `num` entries for a multi-consumer RTS dequeue by advancing the
 * consumer head with a CAS loop — mirror image of
 * __rte_ring_rts_move_prod_head.
 * NOTE(review): extraction dropped lines here (remaining parameters,
 * `behavior` clamping of n, the `do {` opener, and the return) — comments
 * cover only the visible statements.
 *
 * Writes the claimed start position to *old_head and the pre-claim
 * available-entry count to *entries.
 */
163__rte_ring_rts_move_cons_head(
struct rte_ring *r, uint32_t num,
167 uint32_t n, prod_tail;
168 union __rte_ring_rts_poscnt nh, oh;
/* wait until in-flight consumer head updates are under htd_max */
186 oh = __rte_ring_rts_head_wait(&r->rts_cons, rte_memory_order_acquire);
/* acquire pairs with the producer's tail release: entries counted below
 * were fully written before prod.tail advanced */
193 prod_tail = rte_atomic_load_explicit(&r->prod.tail, rte_memory_order_acquire);
/* available = prod_tail - head; unsigned wrap-around arithmetic */
200 *entries = prod_tail - oh.val.pos;
/* new head: position advanced by n, update counter incremented */
209 nh.val.pos = oh.val.pos + n;
210 nh.val.cnt = oh.val.cnt + 1;
220 }
/* retry the 64-bit head CAS until we win; oh.raw is refreshed on failure */
while (rte_atomic_compare_exchange_strong_explicit(&r->rts_cons.head.raw,
221 (uint64_t *)(uintptr_t)&oh.raw, nh.raw,
222 rte_memory_order_release, rte_memory_order_relaxed) == 0);
224 *old_head = oh.val.pos;
/*
 * RTS multi-producer enqueue driver: claim slots, copy elements in, then
 * publish by advancing the producer tail.
 * NOTE(review): extraction dropped lines (remaining parameters such as
 * esize/n/behavior, the n == 0 early-out, and the return) — comments cover
 * only the visible statements.
 */
251__rte_ring_do_rts_enqueue_elem(
struct rte_ring *r,
const void *obj_table,
253 uint32_t *free_space)
/* claim up to n slots; head receives the start index, free the pre-claim
 * free count */
257 n = __rte_ring_rts_move_prod_head(r, n, behavior, &head, &free);
260 __rte_ring_enqueue_elems(r, head, obj_table, esize, n);
/* make the copied elements visible to consumers */
261 __rte_ring_rts_update_tail(&r->rts_prod);
/* optionally report free space remaining after this enqueue */
264 if (free_space != NULL)
265 *free_space = free - n;
/*
 * RTS multi-consumer dequeue driver: claim entries, copy elements out, then
 * release the slots by advancing the consumer tail.
 * NOTE(review): extraction dropped lines (remaining parameters such as
 * esize/n/behavior/available, the n == 0 early-out, and the return) —
 * comments cover only the visible statements.
 */
292__rte_ring_do_rts_dequeue_elem(
struct rte_ring *r,
void *obj_table,
296 uint32_t entries, head;
/* claim up to n entries; head receives the start index, entries the
 * pre-claim available count */
298 n = __rte_ring_rts_move_cons_head(r, n, behavior, &head, &entries);
301 __rte_ring_dequeue_elems(r, head, obj_table, esize, n);
/* make the freed slots visible to producers */
302 __rte_ring_rts_update_tail(&r->rts_cons);
/* optionally report entries remaining after this dequeue */
305 if (available != NULL)
306 *available = entries - n;
#define __rte_always_inline
static void rte_pause(void)