#ifndef _RTE_STACK_LF_GENERIC_H_
#define _RTE_STACK_LF_GENERIC_H_

#include <rte_branch_prediction.h>
#include <rte_prefetch.h>
static __rte_always_inline unsigned int
__rte_stack_lf_count(struct rte_stack *s)
{
	/* Approximate: the list and its length are not updated atomically
	 * together, so the count may under-report, but never over-reports.
	 */
	return (unsigned int)rte_atomic_load_explicit(&s->stack_lf.used.len,
			rte_memory_order_seq_cst);
}
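/* The list head is a 16-byte {pointer, counter} pair that is swapped with a
 * single 128-bit compare-and-swap. The counter tags every update so that a
 * top element which is popped and re-pushed (the ABA problem) cannot match a
 * stale snapshot. For reference, rte_stack.h in recent DPDK releases defines
 * it roughly as:
 *
 *	struct rte_stack_lf_head {
 *		struct rte_stack_lf_elem *top; // stack top
 *		uint64_t cnt; // modification counter to avoid ABA
 *	};
 */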
static __rte_always_inline void
__rte_stack_lf_push_elems(struct rte_stack_lf_list *list,
			  struct rte_stack_lf_elem *first,
			  struct rte_stack_lf_elem *last,
			  unsigned int num)
{
	struct rte_stack_lf_head old_head;
	int success;

	old_head = list->head;

	do {
		struct rte_stack_lf_head new_head;

		/* Order the head load against the CAS on weak memory models */
		rte_smp_mb();

		/* Swing the top pointer to the first new element and link the
		 * last new element to the old top.
		 */
		new_head.top = first;
		new_head.cnt = old_head.cnt + 1;

		last->next = old_head.top;

		/* old_head is refreshed on failure */
		success = rte_atomic128_cmp_exchange(
				(rte_int128_t *)&list->head,
				(rte_int128_t *)&old_head,
				(rte_int128_t *)&new_head,
				1, rte_memory_order_release,
				rte_memory_order_relaxed);
	} while (success == 0);

	rte_atomic_fetch_add_explicit(&list->len, num, rte_memory_order_seq_cst);
}
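/* Ordering note: push increments list->len only after its CAS succeeds,
 * while pop (below) reserves and decrements list->len before attempting its
 * CAS. The length visible to __rte_stack_lf_count() can therefore briefly
 * under-report the list contents, but never over-reports them.
 */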
static __rte_always_inline struct rte_stack_lf_elem *
__rte_stack_lf_pop_elems(struct rte_stack_lf_list *list,
			 unsigned int num,
			 void **obj_table,
			 struct rte_stack_lf_elem **last)
{
	struct rte_stack_lf_head old_head;
	int success;

	/* Reserve num elements, if available */
	while (1) {
		uint64_t len = rte_atomic_load_explicit(&list->len,
				rte_memory_order_seq_cst);

		/* Does the list contain enough elements? */
		if (unlikely(len < num))
			return NULL;

		/* len is refreshed on failure */
		if (rte_atomic_compare_exchange_strong_explicit(&list->len,
				&len, len - num, rte_memory_order_seq_cst,
				rte_memory_order_seq_cst))
			break;
	}

	old_head = list->head;

	do {
		struct rte_stack_lf_head new_head;
		struct rte_stack_lf_elem *tmp;
		unsigned int i;

		/* Order the element reads after the head read on weak memory
		 * models.
		 */
		rte_smp_mb();
		rte_prefetch0(old_head.top);

		/* Walk num elements to find the new top */
		tmp = old_head.top;
		for (i = 0; i < num && tmp != NULL; i++) {
			rte_prefetch0(tmp->next);
			if (obj_table)
				obj_table[i] = tmp->data;
			if (last)
				*last = tmp;
			tmp = tmp->next;
		}

		/* Hitting NULL means the list changed mid-walk; retry */
		if (i != num) {
			old_head = list->head;
			continue;
		}

		new_head.top = tmp;
		new_head.cnt = old_head.cnt + 1;

		/* old_head is refreshed on failure */
		success = rte_atomic128_cmp_exchange(
				(rte_int128_t *)&list->head,
				(rte_int128_t *)&old_head,
				(rte_int128_t *)&new_head,
				1, rte_memory_order_release,
				rte_memory_order_relaxed);
	} while (success == 0);

	return old_head.top;
}

#endif /* _RTE_STACK_LF_GENERIC_H_ */
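/*
 * Usage sketch (a separate source file, not part of this header): the helpers
 * above are internal and are reached through the public rte_stack API. This
 * assumes the EAL is already initialized; the stack name "demo", the size
 * 1024, and the function name are arbitrary choices for illustration.
 */
#include <stdint.h>
#include <rte_lcore.h>
#include <rte_stack.h>

static int
stack_lf_usage_sketch(void)
{
	void *objs[8];
	struct rte_stack *s;
	unsigned int i;

	/* Any application pointers can be stored; small tags are used here */
	for (i = 0; i < 8; i++)
		objs[i] = (void *)(uintptr_t)(i + 1);

	/* RTE_STACK_F_LF selects the lock-free implementation (where the
	 * platform supports it), so push/pop route through
	 * __rte_stack_lf_push_elems()/__rte_stack_lf_pop_elems().
	 */
	s = rte_stack_create("demo", 1024, (int)rte_socket_id(),
			RTE_STACK_F_LF);
	if (s == NULL)
		return -1;

	/* Both calls return the number of objects actually transferred */
	if (rte_stack_push(s, objs, 8) != 8 ||
			rte_stack_pop(s, objs, 8) != 8) {
		rte_stack_free(s);
		return -1;
	}

	rte_stack_free(s);
	return 0;
}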