#ifndef _RTE_STACK_LF_C11_H_
#define _RTE_STACK_LF_C11_H_

#include <rte_branch_prediction.h>
#include <rte_prefetch.h>
#ifdef RTE_TOOLCHAIN_MSVC
/* MSVC lacks the GCC 128-bit atomic builtins; map the CAS onto the Windows
 * intrinsic. _InterlockedCompareExchange128() is always a strong, fully
 * serializing CAS, so the weak flag and the two memory-order arguments are
 * ignored. On failure the intrinsic writes the observed value back into the
 * comparand, matching the rte_atomic128_cmp_exchange() contract.
 */
static __rte_always_inline int
rte_atomic128_cmp_exchange(rte_int128_t *dst,
		rte_int128_t *exp,
		const rte_int128_t *src,
		unsigned int weak,
		int success,
		int failure)
{
	RTE_SET_USED(weak);
	RTE_SET_USED(success);
	RTE_SET_USED(failure);

	return (int)_InterlockedCompareExchange128(
			(int64_t volatile *)dst,
			src->val[1], /* exchange high */
			src->val[0], /* exchange low */
			(int64_t *)exp);
}
#endif
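/* Both CAS paths below operate on the 16-byte list head defined in
 * rte_stack.h: a top pointer paired with a modification counter that is
 * incremented on every successful update, so a reused pointer value cannot
 * be mistaken for an unchanged head (the ABA problem):
 *
 *	struct rte_stack_lf_head {
 *		struct rte_stack_lf_elem *top;
 *		uint64_t cnt;
 *	};
 */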
static __rte_always_inline unsigned int
__rte_stack_lf_count(struct rte_stack *s)
{
	/* stack_lf_push() and stack_lf_pop() do not update the list contents
	 * and stack_lf->len atomically, so this count is approximate: the
	 * list may appear shorter than it really is, but never longer.
	 */
	return (unsigned int)rte_atomic_load_explicit(&s->stack_lf.used.len,
			rte_memory_order_relaxed);
}
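/* Illustrative sketch (not part of the original header): because the count
 * is approximate, a caller sizing a burst from it must still tolerate a
 * failed pop:
 *
 *	unsigned int n = RTE_MIN(__rte_stack_lf_count(s), burst);
 *	if (n == 0 || rte_stack_pop(s, obj_table, n) == 0)
 *		return 0; // stack drained by another thread meanwhile
 */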
static __rte_always_inline void
__rte_stack_lf_push_elems(struct rte_stack_lf_list *list,
		struct rte_stack_lf_elem *first,
		struct rte_stack_lf_elem *last,
		unsigned int num)
{
	struct rte_stack_lf_head old_head;
	int success;

	/* A torn read here is harmless: the CAS below fails on a stale or
	 * torn value and writes the latest head back into old_head.
	 */
	old_head = list->head;

	do {
		struct rte_stack_lf_head new_head;

		/* Swing the top pointer to the first element of the chain
		 * and make the last element point to the old top.
		 */
		new_head.top = first;
		new_head.cnt = old_head.cnt + 1;

		last->next = old_head.top;

		/* Release order: the writes to the LIFO elements must be
		 * visible before the head pointer write.
		 */
		success = rte_atomic128_cmp_exchange(
				(rte_int128_t *)&list->head,
				(rte_int128_t *)&old_head,
				(rte_int128_t *)&new_head,
				1, rte_memory_order_release,
				rte_memory_order_relaxed);
	} while (success == 0);

	/* Release order: the list update above must be visible before the
	 * len update that makes the elements available to poppers.
	 */
	rte_atomic_fetch_add_explicit(&list->len, num, rte_memory_order_release);
}
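/* Illustrative sketch (not part of the original header): the push path in
 * rte_stack_lf.h first pops num free elements, writes the objects into
 * them, then publishes the whole chain with the single release CAS above,
 * roughly:
 *
 *	first = __rte_stack_lf_pop_elems(&s->stack_lf.free, n, NULL, &last);
 *	for (tmp = first, i = 0; i < n; i++, tmp = tmp->next)
 *		tmp->data = obj_table[n - i - 1];
 *	__rte_stack_lf_push_elems(&s->stack_lf.used, first, last, n);
 */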
static __rte_always_inline struct rte_stack_lf_elem *
__rte_stack_lf_pop_elems(struct rte_stack_lf_list *list,
		unsigned int num,
		void **obj_table,
		struct rte_stack_lf_elem **last)
{
	struct rte_stack_lf_head old_head;
	uint64_t len;
	int success = 0;

	/* Reserve num elements, if available */
	len = rte_atomic_load_explicit(&list->len, rte_memory_order_relaxed);
	while (1) {
		/* Does the list contain enough elements? */
		if (unlikely(len < num))
			return NULL;

		/* len is refreshed by the failed CAS */
		if (rte_atomic_compare_exchange_weak_explicit(&list->len,
				&len, len - num,
				rte_memory_order_acquire,
				rte_memory_order_relaxed))
			break;
	}

	/* A torn read here is corrected by the first failed CAS below. */
	old_head = list->head;

	/* Pop num elements */
	do {
		struct rte_stack_lf_head new_head;
		struct rte_stack_lf_elem *tmp;
		unsigned int i;

		/* Acquire order: the element reads below must not be
		 * reordered before the head pointer read.
		 */
		rte_atomic_thread_fence(rte_memory_order_acquire);

		rte_prefetch0(old_head.top);
		tmp = old_head.top;

		/* Traverse the list to find the new head. */
		for (i = 0; i < num && tmp != NULL; i++) {
			rte_prefetch0(tmp->next);
			if (obj_table)
				obj_table[i] = tmp->data;
			if (last)
				*last = tmp;
			tmp = tmp->next;
		}

		/* Hit NULL early: the list was modified while traversing it.
		 * Reload the head and retry.
		 */
		if (i != num) {
			old_head = list->head;
			continue;
		}

		new_head.top = tmp;
		new_head.cnt = old_head.cnt + 1;

		/* Relaxed is sufficient: the caller only dereferences the
		 * popped elements after this head update completes.
		 */
		success = rte_atomic128_cmp_exchange(
				(rte_int128_t *)&list->head,
				(rte_int128_t *)&old_head,
				(rte_int128_t *)&new_head,
				0, rte_memory_order_relaxed,
				rte_memory_order_relaxed);
	} while (success == 0);

	return old_head.top;
}
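/* Illustrative sketch (not part of the original header): the matching pop
 * path in rte_stack_lf.h takes num elements off the used list, filling
 * obj_table along the way, then recycles the chain onto the free list,
 * roughly:
 *
 *	first = __rte_stack_lf_pop_elems(&s->stack_lf.used, n, obj_table, &last);
 *	if (unlikely(first == NULL))
 *		return 0;
 *	__rte_stack_lf_push_elems(&s->stack_lf.free, first, last, n);
 */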
/* The remaining helpers used above are provided by DPDK's EAL headers
 * (for MSVC, rte_atomic128_cmp_exchange() is the shim defined at the top
 * of this file):
 *
 *	int rte_atomic128_cmp_exchange(rte_int128_t *dst, rte_int128_t *exp,
 *			const rte_int128_t *src, unsigned int weak,
 *			int success, int failure);
 *	void rte_atomic_thread_fence(rte_memory_order memorder);
 *	void rte_prefetch0(const volatile void *p);
 *	__rte_always_inline (force-inline function attribute)
 */

#endif /* _RTE_STACK_LF_C11_H_ */