#ifndef _RTE_STACK_LF_C11_H_
#define _RTE_STACK_LF_C11_H_

#include <rte_branch_prediction.h>
#include <rte_prefetch.h>
static __rte_always_inline unsigned int
__rte_stack_lf_count(struct rte_stack *s)
{
        /* The list contents and list->len are not updated atomically, so the
         * count is approximate: it may transiently under-report the length,
         * but it never over-reports it.
         */
        return (unsigned int)__atomic_load_n(&s->stack_lf.used.len,
                                             __ATOMIC_RELAXED);
}
static __rte_always_inline void
__rte_stack_lf_push_elems(struct rte_stack_lf_list *list,
                          struct rte_stack_lf_elem *first,
                          struct rte_stack_lf_elem *last,
                          unsigned int num)
{
#ifndef RTE_ARCH_X86_64
        RTE_SET_USED(first);
        RTE_SET_USED(last);
        RTE_SET_USED(list);
        RTE_SET_USED(num);
#else
        struct rte_stack_lf_head old_head;
        int success;

        old_head = list->head;

        do {
                struct rte_stack_lf_head new_head;

                /* Acquire fence: synchronize with the release CAS that last
                 * wrote list->head.
                 */
                __atomic_thread_fence(__ATOMIC_ACQUIRE);

                /* The pushed chain becomes the new top; its tail links to
                 * the old top.
                 */
                new_head.top = first;
                new_head.cnt = old_head.cnt + 1;

                last->next = old_head.top;

                /* Release ordering makes the element writes visible before
                 * the head update. old_head is refreshed on failure.
                 */
                success = rte_atomic128_cmp_exchange(
                                (rte_int128_t *)&list->head,
                                (rte_int128_t *)&old_head,
                                (rte_int128_t *)&new_head,
                                1, __ATOMIC_RELEASE,
                                __ATOMIC_RELAXED);
        } while (success == 0);

        /* Order the list writes before the length update. */
        __atomic_add_fetch(&list->len, num, __ATOMIC_RELEASE);
#endif
}
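
/*
 * Illustrative sketch, not part of the original header: one way a caller that
 * owns a private array of elements could link them into a chain and publish
 * them with a single lock-free push. The helper name is hypothetical; the
 * real caller lives in rte_stack_lf.h.
 */
static __rte_always_inline void
__rte_stack_lf_push_chain_sketch(struct rte_stack_lf_list *list,
                                 struct rte_stack_lf_elem *elems,
                                 void * const *obj_table,
                                 unsigned int num)
{
        unsigned int i;

        if (unlikely(num == 0))
                return;

        /* Chain the elements and attach one object pointer to each. */
        for (i = 0; i < num; i++) {
                elems[i].data = obj_table[i];
                elems[i].next = (i + 1 < num) ? &elems[i + 1] : NULL;
        }

        /* The push overwrites last->next to point at the previous top. */
        __rte_stack_lf_push_elems(list, &elems[0], &elems[num - 1], num);
}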
static __rte_always_inline struct rte_stack_lf_elem *
__rte_stack_lf_pop_elems(struct rte_stack_lf_list *list,
                         unsigned int num,
                         void **obj_table,
                         struct rte_stack_lf_elem **last)
{
#ifndef RTE_ARCH_X86_64
        RTE_SET_USED(obj_table);
        RTE_SET_USED(last);
        RTE_SET_USED(list);
        RTE_SET_USED(num);

        return NULL;
#else
        struct rte_stack_lf_head old_head;
        uint64_t len;
        int success = 0;

        /* Reserve num elements, if available. len is refreshed on failure. */
        len = __atomic_load_n(&list->len, __ATOMIC_ACQUIRE);
        while (1) {
                /* Does the list contain enough elements? */
                if (unlikely(len < num))
                        return NULL;

                if (__atomic_compare_exchange_n(&list->len,
                                                &len, len - num,
                                                0, __ATOMIC_ACQUIRE,
                                                __ATOMIC_ACQUIRE))
                        break;
        }

        old_head = list->head;

        /* Pop num elements. */
        do {
                struct rte_stack_lf_head new_head;
                struct rte_stack_lf_elem *tmp;
                unsigned int i;

                /* Acquire fence: synchronize with the release CAS that last
                 * wrote list->head, so the element reads below are ordered
                 * after the head read.
                 */
                __atomic_thread_fence(__ATOMIC_ACQUIRE);

                rte_prefetch0(old_head.top);

                tmp = old_head.top;

                /* Walk num elements to find the new top. */
                for (i = 0; i < num && tmp != NULL; i++) {
                        rte_prefetch0(tmp->next);
                        if (obj_table)
                                obj_table[i] = tmp->data;
                        if (last)
                                *last = tmp;
                        tmp = tmp->next;
                }

                /* NULL was hit early: the list was modified while it was
                 * being traversed, so reload the head and retry.
                 */
                if (i != num) {
                        old_head = list->head;
                        continue;
                }

                new_head.top = tmp;
                new_head.cnt = old_head.cnt + 1;

                /* old_head is refreshed on failure. */
                success = rte_atomic128_cmp_exchange(
                                (rte_int128_t *)&list->head,
                                (rte_int128_t *)&old_head,
                                (rte_int128_t *)&new_head,
                                1, __ATOMIC_RELEASE,
                                __ATOMIC_RELAXED);
        } while (success == 0);

        return old_head.top;
#endif
}
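
/*
 * Illustrative sketch, not part of the original header: roughly how the
 * wrapper layer in rte_stack_lf.h drives the two helpers above for a bulk
 * push. It takes n spare elements from the stack's "free" list, stores the
 * object pointers in them, and publishes them on the "used" list. Treat this
 * as an approximation of __rte_stack_lf_push(), not the exact DPDK code.
 */
static __rte_always_inline unsigned int
__rte_stack_lf_push_sketch(struct rte_stack *s,
                           void * const *obj_table,
                           unsigned int n)
{
        struct rte_stack_lf_elem *tmp, *first, *last = NULL;
        unsigned int i;

        if (unlikely(n == 0))
                return 0;

        /* Pop n free elements to carry the new objects. */
        first = __rte_stack_lf_pop_elems(&s->stack_lf.free, n, NULL, &last);
        if (unlikely(first == NULL))
                return 0;

        /* Store the object pointers in the popped elements. */
        for (tmp = first, i = 0; i < n; i++, tmp = tmp->next)
                tmp->data = obj_table[n - i - 1];

        /* Publish the filled elements on the used list. */
        __rte_stack_lf_push_elems(&s->stack_lf.used, first, last, n);

        return n;
}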
/*
 * For reference, the external helpers used above are declared elsewhere in
 * DPDK (rte_common.h, rte_atomic.h and rte_prefetch.h), approximately as:
 *
 *   #define __rte_always_inline inline __attribute__((always_inline))
 *
 *   static inline int __rte_experimental
 *   rte_atomic128_cmp_exchange(rte_int128_t *dst, rte_int128_t *exp,
 *                              const rte_int128_t *src, unsigned int weak,
 *                              int success, int failure);
 *
 *   static inline void rte_prefetch0(const volatile void *p);
 */

#endif /* _RTE_STACK_LF_C11_H_ */