#ifndef _RTE_RING_RTS_ELEM_PVT_H_
#define _RTE_RING_RTS_ELEM_PVT_H_
25__rte_ring_rts_update_tail(
struct rte_ring_rts_headtail *ht)
27 union __rte_ring_rts_poscnt h, ot, nt;
45 ot.raw = rte_atomic_load_explicit(&ht->tail.raw, rte_memory_order_acquire);
49 h.raw = rte_atomic_load_explicit(&ht->head.raw, rte_memory_order_relaxed);
52 if (++nt.val.cnt == h.val.cnt)
53 nt.val.pos = h.val.pos;
59 }
while (rte_atomic_compare_exchange_strong_explicit(&ht->tail.raw,
60 (uint64_t *)(uintptr_t)&ot.raw, nt.raw,
61 rte_memory_order_release, rte_memory_order_acquire) == 0);
69__rte_ring_rts_head_wait(const struct rte_ring_rts_headtail *ht,
70 rte_memory_order memorder)
72 union __rte_ring_rts_poscnt h;
73 uint32_t max = ht->htd_max;
75 h.raw = rte_atomic_load_explicit(&ht->head.raw, memorder);
77 while (h.val.pos - ht->tail.val.pos > max) {
79 h.raw = rte_atomic_load_explicit(&ht->head.raw, memorder);
110__rte_ring_rts_move_head(
struct rte_ring_rts_headtail *d,
116 union __rte_ring_rts_poscnt nh, oh;
133 oh = __rte_ring_rts_head_wait(d, rte_memory_order_acquire);
140 stail = rte_atomic_load_explicit(&s->tail, rte_memory_order_acquire);
148 *entries = capacity + stail - oh.val.pos;
158 nh.val.pos = oh.val.pos + n;
159 nh.val.cnt = oh.val.cnt + 1;
169 }
while (rte_atomic_compare_exchange_strong_explicit(&d->head.raw,
170 (uint64_t *)(uintptr_t)&oh.raw, nh.raw,
171 rte_memory_order_release,
172 rte_memory_order_relaxed) == 0);
174 *old_head = oh.val.pos;
182__rte_ring_rts_move_prod_head(
struct rte_ring *r, uint32_t num,
184 uint32_t *free_entries)
186 return __rte_ring_rts_move_head(&r->rts_prod, &r->cons,
187 r->
capacity, num, behavior, old_head, free_entries);
194__rte_ring_rts_move_cons_head(
struct rte_ring *r, uint32_t num,
198 return __rte_ring_rts_move_head(&r->rts_cons, &r->prod,
199 0, num, behavior, old_head, entries);
225__rte_ring_do_rts_enqueue_elem(
struct rte_ring *r,
const void *obj_table,
227 uint32_t *free_space)
231 n = __rte_ring_rts_move_prod_head(r, n, behavior, &head, &free);
234 __rte_ring_enqueue_elems(r, head, obj_table, esize, n);
235 __rte_ring_rts_update_tail(&r->rts_prod);
238 if (free_space != NULL)
239 *free_space = free - n;
266__rte_ring_do_rts_dequeue_elem(
struct rte_ring *r,
void *obj_table,
270 uint32_t entries, head;
272 n = __rte_ring_rts_move_cons_head(r, n, behavior, &head, &entries);
275 __rte_ring_dequeue_elems(r, head, obj_table, esize, n);
276 __rte_ring_rts_update_tail(&r->rts_cons);
279 if (available != NULL)
280 *available = entries - n;
/* NOTE(review): defining __rte_always_inline to nothing here, AFTER all of
 * its uses above, looks like a test/scaffolding stub rather than the real
 * definition (normally provided by rte_common.h before any use) — TODO
 * confirm intent and placement.
 */
#define __rte_always_inline
static void rte_pause(void)